# frozen_string_literal: true

module Projects
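  # Deploys the public/ directory from a build's pages artifacts as the
  # project's GitLab Pages site.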
  class UpdatePagesService < BaseService
    InvalidStateError = Class.new(StandardError)
    FailedToExtractError = Class.new(StandardError)

    BLOCK_SIZE = 32.kilobytes
    MAX_SIZE = 1.terabyte
    PUBLIC_DIR = 'public'

    # this has to be an invalid group name,
    # as it shares the namespace with groups
    TMP_EXTRACT_PATH = '@pages.tmp'

    attr_reader :build

    def initialize(project, build)
      @project, @build = project, build
    end

    def execute
      register_attempt

      # Create status notifying the deployment of pages
      @status = create_status
      @status.enqueue!
      @status.run!

      raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
      raise InvalidStateError, 'pages are outdated' unless latest?

      # Create a temporary directory in which we will extract the artifacts
      make_secure_tmp_dir(tmp_path) do |archive_path|
        extract_archive!(archive_path)

        # Check that we did extract the public directory
        archive_public_path = File.join(archive_path, PUBLIC_DIR)
        raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
        raise InvalidStateError, 'pages are outdated' unless latest?

        deploy_page!(archive_public_path)
        success
      end
    rescue InvalidStateError => e
      error(e.message)
    rescue => e
      error(e.message)
      raise e
    end

    private

    def success
      @status.success
      @project.mark_pages_as_deployed
      super
    end

    def error(message)
      register_failure
      log_error("Projects::UpdatePagesService: #{message}")
      @status.allow_failure = !latest?
      @status.description = message
      @status.drop(:script_failure)
      super
    end

    def create_status
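      # The deployment is tracked in the build's pipeline as a generic
      # commit status named `pages:deploy`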
      GenericCommitStatus.new(
        project: project,
        pipeline: build.pipeline,
        user: build.user,
        ref: build.ref,
        stage: 'deploy',
        name: 'pages:deploy'
      )
    end

    def extract_archive!(temp_path)
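      # Only zip artifacts are supported; anything else aborts the deployment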
      if artifacts.ends_with?('.zip')
        extract_zip_archive!(temp_path)
      else
        raise InvalidStateError, 'unsupported artifacts format'
      end
    end

    def extract_zip_archive!(temp_path)
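      # The site size is validated against the artifacts metadata before anything
      # is unpacked, and only the public/ directory is extracted from the zip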
      raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?

      # Calculate the page size after extraction
      public_entry = build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true)

      if public_entry.total_size > max_size
        raise InvalidStateError, "artifacts for pages are too large: #{public_entry.total_size}"
      end

      build.artifacts_file.use_file do |artifacts_path|
        SafeZip::Extract.new(artifacts_path)
          .extract(directories: [PUBLIC_DIR], to: temp_path)
      end
    rescue SafeZip::Extract::Error => e
      raise FailedToExtractError, e.message
    end

    def deploy_page!(archive_public_path)
      # Do an atomic move of pages
      # Move and removal may not be atomic, but they are significantly faster than extracting and removing
      # 1. We move the deployed public to the previous public path (file removal is slow)
      # 2. We move the temporary public to be the deployed public
      # 3. We remove the previous public path
      FileUtils.mkdir_p(pages_path)
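      # Moving the currently deployed public directory can fail (e.g. on the first
      # deployment, when it does not exist yet); that failure is deliberately ignored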
      begin
        FileUtils.move(public_path, previous_public_path)
      rescue
      end
      FileUtils.move(archive_public_path, public_path)
    ensure
      FileUtils.rm_r(previous_public_path, force: true)
    end

    def latest?
      # check if the sha for the ref is still the most recent one
      # this helps when multiple deployments happen
      sha == latest_sha
    end

    def blocks
      # Calculate dd parameters: we limit the size of pages
      1 + max_size / BLOCK_SIZE
    end

    def max_size
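      # max_pages_size is configured in megabytes; 0 means no custom limit,
      # and MAX_SIZE is the hard cap either way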
      max_pages_size = Gitlab::CurrentSettings.max_pages_size.megabytes

      return MAX_SIZE if max_pages_size.zero?

      [max_pages_size, MAX_SIZE].min
    end

    def tmp_path
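      # Kept under the Pages storage root, presumably so the later move into place
      # is a cheap same-filesystem rename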
      @tmp_path ||= File.join(::Settings.pages.path, TMP_EXTRACT_PATH)
    end

    def pages_path
      @pages_path ||= project.pages_path
    end

    def public_path
      @public_path ||= File.join(pages_path, PUBLIC_DIR)
    end

    def previous_public_path
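      # Random suffix so consecutive deployments do not collide on the "previous" directory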
      @previous_public_path ||= File.join(pages_path, "#{PUBLIC_DIR}.#{SecureRandom.hex}")
    end

    def ref
      build.ref
    end

    def artifacts
      build.artifacts_file.path
    end

    def latest_sha
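      # project.commit can return nil when the ref no longer exists, hence the try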
      project.commit(build.ref).try(:sha).to_s
    ensure
      # Close any file descriptors that were opened and free libgit2 buffers
      project.cleanup
    end

    def sha
      build.sha
    end

    def register_attempt
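      # Incremented for every deployment attempt; failures also bump the
      # failed counter via register_failure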
      pages_deployments_total_counter.increment
    end

    def register_failure
      pages_deployments_failed_total_counter.increment
    end

    def pages_deployments_total_counter
      @pages_deployments_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_total, "Counter of GitLab Pages deployments triggered")
    end

    def pages_deployments_failed_total_counter
      @pages_deployments_failed_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_failed_total, "Counter of GitLab Pages deployments which failed")
    end

    def make_secure_tmp_dir(tmp_path)
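      # Creates a uniquely named scratch directory under tmp_path, yields it, and
      # always removes it; remove_entry_secure guards against symlink attacks during cleanup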
      FileUtils.mkdir_p(tmp_path)
      path = Dir.mktmpdir(nil, tmp_path)
      begin
        yield(path)
      ensure
        FileUtils.remove_entry_secure(path)
      end
    end
  end
end