2018-11-18 11:00:15 +05:30
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
module Projects
|
|
|
|
class UpdatePagesService < BaseService
|
2018-10-15 14:42:47 +05:30
|
|
|
InvalidStateError = Class.new(StandardError)
|
2018-03-17 18:26:18 +05:30
|
|
|
FailedToExtractError = Class.new(StandardError)
|
2021-02-22 17:27:13 +05:30
|
|
|
ExclusiveLeaseTaken = Class.new(StandardError)
|
|
|
|
|
|
|
|
include ::Pages::LegacyStorageLease
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
BLOCK_SIZE = 32.kilobytes
|
2019-12-04 20:38:33 +05:30
|
|
|
PUBLIC_DIR = 'public'
|
2019-02-02 18:00:53 +05:30
|
|
|
|
|
|
|
# this has to be invalid group name,
|
|
|
|
# as it shares the namespace with groups
|
2019-12-04 20:38:33 +05:30
|
|
|
TMP_EXTRACT_PATH = '@pages.tmp'
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2021-01-29 00:20:46 +05:30
|
|
|
# old deployment can be cached by pages daemon
|
|
|
|
# so we need to give pages daemon some time update cache
|
|
|
|
# 10 minutes is enough, but 30 feels safer
|
|
|
|
OLD_DEPLOYMENTS_DESTRUCTION_DELAY = 30.minutes.freeze
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
attr_reader :build
|
|
|
|
|
|
|
|
# Sets up the service for a single pages deployment.
#
# project - the Project whose pages are being updated
# build   - the CI build that produced the pages artifacts
def initialize(project, build)
  @project, @build = project, build
end
# Runs the pages deployment for the build given at construction time.
#
# Registers a metrics attempt, creates a running `pages:deploy` commit
# status on the build's pipeline, validates the artifacts, then deploys
# them (legacy disk storage plus a new pages deployment record).
#
# InvalidStateError is reported via #error and swallowed; any other
# exception is reported and re-raised.
def execute
  register_attempt

  # Create status notifying the deployment of pages
  @status = build_commit_status
  ::Ci::Pipelines::AddJobService.new(@build.pipeline).execute!(@status) do |job|
    job.enqueue!
    job.run!
  end

  raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
  raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?

  build.artifacts_file.use_file do |archive_path|
    deploy_to_legacy_storage(archive_path)

    create_pages_deployment(archive_path, build)

    success
  end
rescue InvalidStateError => e
  # Expected failure mode: report it on the commit status, don't re-raise.
  error(e.message)
rescue StandardError => e
  error(e.message)
  raise e
end
# Marks the commit status successful and records the project as having
# deployed pages, then delegates to BaseService#success.
def success
  @status.success
  @project.mark_pages_as_deployed(artifacts_archive: build.job_artifacts_archive)
  super
end
# Records a failed deployment: bumps the failure counter, logs, and drops
# the commit status with the given message, then delegates to
# BaseService#error.
def error(message)
  register_failure
  log_error("Projects::UpdatePagesService: #{message}")
  @status.tap do |status|
    # A stale build failing should not fail the pipeline.
    status.allow_failure = !latest?
    status.description = message
    status.drop(:script_failure)
  end
  super
end
# Builds (without persisting) the GenericCommitStatus that represents this
# pages deployment on the build's pipeline.
def build_commit_status
  GenericCommitStatus.new(user: build.user, stage: 'deploy', name: 'pages:deploy')
end
# Extracts the artifacts archive and publishes its `public/` directory to
# the legacy on-disk pages storage, unless legacy storage is disabled.
#
# artifacts_path - local filesystem path of the artifacts archive
def deploy_to_legacy_storage(artifacts_path)
  # path today used by one project can later be used by another
  # so we can't really scope this feature flag by project or group
  return unless ::Settings.pages.local_store.enabled

  return if Feature.enabled?(:skip_pages_deploy_to_legacy_storage, project, default_enabled: :yaml)

  # Create temporary directory in which we will extract the artifacts
  make_secure_tmp_dir(tmp_path) do |extract_dir|
    extract_archive!(artifacts_path, extract_dir)

    # Check if we did extract public directory
    archive_public_path = File.join(extract_dir, PUBLIC_DIR)
    raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
    raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?

    deploy_page!(archive_public_path)
  end
end
# Extracts the pages artifacts archive into +temp_path+.
#
# artifacts_path - local filesystem path of the artifacts archive
# temp_path      - directory to extract into
#
# Raises InvalidStateError when the artifact is not a supported (.zip)
# format.
def extract_archive!(artifacts_path, temp_path)
  # `end_with?` is the core-Ruby String predicate; the `ends_with?`
  # alias needlessly depends on ActiveSupport being loaded.
  if artifacts.end_with?('.zip')
    extract_zip_archive!(artifacts_path, temp_path)
  else
    raise InvalidStateError, 'unsupported artifacts format'
  end
end
# Safely extracts the `public/` directory from the zip archive into
# +temp_path+, enforcing the configured maximum pages size first.
#
# Raises InvalidStateError when metadata is missing or the site is too
# large, and FailedToExtractError when SafeZip rejects the archive.
def extract_zip_archive!(artifacts_path, temp_path)
  raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?

  # Calculate page size after extract
  public_entry = build.artifacts_metadata_entry("#{PUBLIC_DIR}/", recursive: true)
  total_size = public_entry.total_size
  raise InvalidStateError, "artifacts for pages are too large: #{total_size}" if total_size > max_size

  SafeZip::Extract.new(artifacts_path).extract(directories: [PUBLIC_DIR], to: temp_path)
rescue SafeZip::Extract::Error => e
  raise FailedToExtractError, e.message
end
# Deploys the extracted public directory under an exclusive lease so that
# only one deployment touches legacy storage at a time.
#
# Raises ExclusiveLeaseTaken when the lease is held by another deployment.
def deploy_page!(archive_public_path)
  lease_obtained = try_obtain_lease do
    deploy_page_unsafe!(archive_public_path)
    true
  end

  return if lease_obtained

  raise ExclusiveLeaseTaken, "Failed to deploy pages - other deployment is in progress"
end
# Swaps the extracted public directory into place. Not safe to run
# concurrently — callers must hold the lease (see #deploy_page!).
def deploy_page_unsafe!(archive_public_path)
  # Do atomic move of pages
  # Move and removal may not be atomic, but they are significantly faster then extracting and removal
  # 1. We move deployed public to previous public path (file removal is slow)
  # 2. We move temporary public to be deployed public
  # 3. We remove previous public path
  FileUtils.mkdir_p(pages_path)
  begin
    FileUtils.move(public_path, previous_public_path)
  rescue StandardError
    # No previously deployed public directory to displace — nothing to do.
  end
  FileUtils.move(archive_public_path, public_path)
ensure
  FileUtils.rm_r(previous_public_path, force: true)
end
# Creates a PagesDeployment record from the artifacts archive, makes it
# the project's current deployment, and schedules destruction of the old
# deployments after a grace period.
def create_pages_deployment(artifacts_path, build)
  # we're using the full archive and pages daemon needs to read it
  # so we want the total count from entries, not only "public/" directory
  # because it better approximates work we need to do before we can serve the site
  file_count = build.artifacts_metadata_entry("", recursive: true).entries.count
  archive_sha256 = build.job_artifacts_archive.file_sha256

  deployment = nil
  File.open(artifacts_path) do |file|
    deployment = project.pages_deployments.create!(file: file,
                                                   file_count: file_count,
                                                   file_sha256: archive_sha256)

    # Re-check freshness before switching the live deployment.
    raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?

    project.update_pages_deployment!(deployment)
  end

  # Old deployments are destroyed after a delay because the pages daemon
  # may still be serving them from cache (see OLD_DEPLOYMENTS_DESTRUCTION_DELAY).
  DestroyPagesDeploymentsWorker.perform_in(
    OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
    project.id,
    deployment.id
  )
end
# Whether the build's SHA is still the newest commit on its ref.
# Guards against concurrent deployments overwriting a newer one.
def latest?
  latest_sha == sha
end
# Number of BLOCK_SIZE-sized blocks needed to cover max_size, with one
# extra block to absorb any remainder.
def blocks
  # Calculate dd parameters: we limit the size of pages
  (max_size / BLOCK_SIZE) + 1
end
# Application-wide pages size limit, converted from the admin setting
# (stored in megabytes) to bytes.
def max_size_from_settings
  Gitlab::CurrentSettings.max_pages_size.megabytes
end
# Effective pages size limit in bytes. A configured value of 0 means
# "unlimited", which maps to the hard ceiling ::Gitlab::Pages::MAX_SIZE.
def max_size
  configured = max_size_from_settings
  return ::Gitlab::Pages::MAX_SIZE if configured == 0

  configured
end
# Shared scratch directory under the pages root; '@pages.tmp' cannot
# collide with a real group namespace (see TMP_EXTRACT_PATH).
def tmp_path
  @tmp_path ||= File.join(::Settings.pages.path, TMP_EXTRACT_PATH)
end
# Root directory of this project's legacy pages storage (memoized).
def pages_path
  @pages_path ||= project.pages_path
end
# The live `public/` directory served by the pages daemon (memoized).
def public_path
  @public_path ||= File.join(pages_path, PUBLIC_DIR)
end
# Randomized holding directory for the outgoing deployment; the random
# suffix avoids collisions with leftovers from earlier deployments
# (memoized so the same path is used for move and cleanup).
def previous_public_path
  @previous_public_path ||= File.join(pages_path, "#{PUBLIC_DIR}.#{SecureRandom.hex}")
end
# The git ref the build ran against.
def ref
  build.ref
end
# Local filename of the build's artifacts archive (used to detect the
# archive format by extension).
def artifacts
  build.artifacts_file.path
end
# Most recent commit SHA on the build's ref, or "" when the ref no longer
# resolves to a commit.
def latest_sha
  project.commit(build.ref)&.sha.to_s
ensure
  # Close any file descriptors that were opened and free libgit2 buffers
  project.cleanup
end
# The commit SHA this build was created for.
def sha
  build.sha
end
# Counts every deployment attempt, successful or not.
def register_attempt
  pages_deployments_total_counter.increment
end
# Counts every failed deployment (called from #error).
def register_failure
  pages_deployments_failed_total_counter.increment
end
# Memoized Prometheus counter for all pages deployment attempts.
def pages_deployments_total_counter
  @pages_deployments_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_total, "Counter of GitLab Pages deployments triggered")
end
# Memoized Prometheus counter for failed pages deployments.
def pages_deployments_failed_total_counter
  @pages_deployments_failed_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_failed_total, "Counter of GitLab Pages deployments which failed")
end
# Yields a freshly created, uniquely named working directory under
# +tmp_path+ and removes it securely afterwards, even on error.
# Returns the block's result.
def make_secure_tmp_dir(tmp_path)
  # The parent must exist before mktmpdir can create the unique child.
  FileUtils.mkdir_p(tmp_path)
  work_dir = Dir.mktmpdir(tmp_dir_prefix, tmp_path)
  yield(work_dir)
ensure
  FileUtils.remove_entry_secure(work_dir) if work_dir
end
# Prefix for the extraction tmp dir so leftovers can be traced back to a
# specific project and build.
def tmp_dir_prefix
  "project-%s-build-%s-" % [project.id, build.id]
end
end
|
|
|
|
end
|
2020-03-13 15:44:24 +05:30
|
|
|
|
2021-06-08 01:23:25 +05:30
|
|
|
Projects::UpdatePagesService.prepend_mod_with('Projects::UpdatePagesService')
|