Update upstream source from tag 'upstream/16.0.8+ds1'

Update to upstream version '16.0.8+ds1'
with Debian dir 0258e42959
Author: Aravinth Manivannan
Date:   2023-09-09 11:38:58 +00:00
Commit: 3816fce38f
65 changed files with 1338 additions and 534 deletions

@@ -5126,7 +5126,6 @@ RSpec/MissingFeatureCategory:
     - 'spec/policies/ci/bridge_policy_spec.rb'
     - 'spec/policies/ci/build_policy_spec.rb'
     - 'spec/policies/ci/pipeline_policy_spec.rb'
-    - 'spec/policies/ci/pipeline_schedule_policy_spec.rb'
     - 'spec/policies/ci/trigger_policy_spec.rb'
     - 'spec/policies/clusters/agent_policy_spec.rb'
     - 'spec/policies/clusters/agent_token_policy_spec.rb'

@@ -2,6 +2,28 @@
 documentation](doc/development/changelog.md) for instructions on adding your own
 entry.
+## 16.0.8 (2023-08-01)
+
+### Fixed (1 change)
+
+- [Disable IAT verification by default](gitlab-org/security/gitlab@6d17a50539b8518da18bc68accc03b48d73173a0)
+
+### Security (13 changes)
+
+- [Prevent leaking emails of newly created users](gitlab-org/security/gitlab@b2872b398599cd7ee20c4119ae4c8e6ba2a6882d) ([merge request](gitlab-org/security/gitlab!3451))
+- [Added redirect to filtered params](gitlab-org/security/gitlab@49ffc2cc98af0e66305c8a653c74e0b92ee06ce8) ([merge request](gitlab-org/security/gitlab!3443))
+- [Relocate PlantUML config and disable SVG support](gitlab-org/security/gitlab@c6ded17a7d17ec8c3ed55cb94b8e6e524b6bbd5e) ([merge request](gitlab-org/security/gitlab!3440))
+- [Sanitize multiple hardlinks from import archives](gitlab-org/security/gitlab@9dabd8ebca50d8ea3781a0c4955a40cd07c453e7) ([merge request](gitlab-org/security/gitlab!3437))
+- [Validates project path availability](gitlab-org/security/gitlab@97e6ce4d15c8f4bcc7f60a560b789a023d391531) ([merge request](gitlab-org/security/gitlab!3428))
+- [Fix policy project assign](gitlab-org/security/gitlab@c1cca8ce8f24f6466563a50463e3254c5c423e97) ([merge request](gitlab-org/security/gitlab!3425))
+- [Fix pipeline schedule authorization for protected branch/tag](gitlab-org/security/gitlab@0c7017d993a33ef9fc693d4435505a4aea0141d1) ([merge request](gitlab-org/security/gitlab!3363))
+- [Mitigate autolink filter ReDOS](gitlab-org/security/gitlab@9072c630608a81645548b64b32d9f81bd258ba06) ([merge request](gitlab-org/security/gitlab!3432))
+- [Fix XSS vector in Web IDE](gitlab-org/security/gitlab@2832d1ae3b3e1bfc42bbeaeb29841a1e5fecac8a) ([merge request](gitlab-org/security/gitlab!3411))
+- [Mitigate project reference filter ReDOS](gitlab-org/security/gitlab@9c73619acaad3eb3605bf632f066bcee59b86566) ([merge request](gitlab-org/security/gitlab!3429))
+- [Add a stricter regex for the Harbor search param](gitlab-org/security/gitlab@c27e5e48a02d3411e84617b4fb7fd3f0fb49b618) ([merge request](gitlab-org/security/gitlab!3396))
+- [Update pipeline user to the last policy MR author](gitlab-org/security/gitlab@b1e9bcb33106ba7e279d5fd42c4f2c1727629f63) ([merge request](gitlab-org/security/gitlab!3393))
+- [Prohibit 40 character hex plus a hyphen if branch name is path](gitlab-org/security/gitlab@66c81ff6b50d0e53fc1f1b153439ad95614c9d09) ([merge request](gitlab-org/security/gitlab!3406))
+
 ## 16.0.7 (2023-07-04)
 ### Security (1 change)

@@ -1 +1 @@
-16.0.7
+16.0.8

@@ -1 +1 @@
-16.0.7
+16.0.8

@@ -1 +1 @@
-16.0.7
+16.0.8

@@ -21,7 +21,6 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
   end

   def new
-    @schedule = project.pipeline_schedules.new
   end

   def create
@@ -102,6 +101,15 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
       variables_attributes: [:id, :variable_type, :key, :secret_value, :_destroy])
   end

+  def new_schedule
+    # We need the `ref` here for `authorize_create_pipeline_schedule!`
+    @schedule ||= project.pipeline_schedules.new(ref: params.dig(:schedule, :ref))
+  end
+
+  def authorize_create_pipeline_schedule!
+    return access_denied! unless can?(current_user, :create_pipeline_schedule, new_schedule)
+  end
+
   def authorize_play_pipeline_schedule!
     return access_denied! unless can?(current_user, :play_pipeline_schedule, schedule)
   end

@@ -584,6 +584,8 @@ class Project < ApplicationRecord
   validates :max_artifacts_size, numericality: { only_integer: true, greater_than: 0, allow_nil: true }
   validates :suggestion_commit_message, length: { maximum: MAX_SUGGESTIONS_TEMPLATE_LENGTH }

+  validate :path_availability, if: :path_changed?
+
   # Scopes
   scope :pending_delete, -> { where(pending_delete: true) }
   scope :without_deleted, -> { where(pending_delete: false) }
@@ -3180,6 +3182,15 @@
   end
   strong_memoize_attr :frozen_outbound_job_token_scopes?

+  def path_availability
+    base, _, host = path.partition('.')
+
+    return unless host == Gitlab.config.pages&.dig('host')
+    return unless ProjectSetting.where(pages_unique_domain: base).exists?
+
+    errors.add(:path, s_('Project|already in use'))
+  end
+
   private

   def pages_unique_domain_enabled?

@@ -52,6 +52,8 @@ class ProjectSetting < ApplicationRecord
   validate :validates_mr_default_target_self

+  validate :pages_unique_domain_availability, if: :pages_unique_domain_changed?
+
   attribute :legacy_open_source_license_available, default: -> do
     Feature.enabled?(:legacy_open_source_license_available, type: :ops)
   end
@@ -102,6 +104,15 @@
       pages_unique_domain_enabled ||
       pages_unique_domain_in_database.present?
   end
+
+  def pages_unique_domain_availability
+    host = Gitlab.config.pages&.dig('host')
+
+    return if host.blank?
+    return unless Project.where(path: "#{pages_unique_domain}.#{host}").exists?
+
+    errors.add(:pages_unique_domain, s_('ProjectSetting|already in use'))
+  end
 end

 ProjectSetting.prepend_mod
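
Taken together, the validation added to Project and the one added to ProjectSetting make project paths and Pages unique domains mutually exclusive names under the Pages host. A minimal plain-Ruby sketch of that two-way check, using in-memory lists and a hypothetical example.io Pages host in place of the real ActiveRecord lookups:

    PAGES_HOST = 'example.io'
    existing_project_paths  = ['blog.example.io']
    existing_unique_domains = ['docs-3f2a']

    # Mirrors Project#path_availability: a path that looks like
    # "<unique-domain>.<pages-host>" is rejected.
    def path_available?(path, pages_host, unique_domains)
      base, _, host = path.partition('.')
      return true unless host == pages_host

      !unique_domains.include?(base)
    end

    # Mirrors ProjectSetting#pages_unique_domain_availability: a unique domain
    # that collides with an existing project path is rejected.
    def unique_domain_available?(domain, pages_host, project_paths)
      !project_paths.include?("#{domain}.#{pages_host}")
    end

    p path_available?('docs-3f2a.example.io', PAGES_HOST, existing_unique_domains) # => false
    p unique_domain_available?('blog', PAGES_HOST, existing_project_paths)         # => false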

@@ -5,7 +5,18 @@ module Ci
     alias_method :pipeline_schedule, :subject

     condition(:protected_ref) do
-      ref_protected?(@user, @subject.project, @subject.project.repository.tag_exists?(@subject.ref), @subject.ref)
+      if full_ref?(@subject.ref)
+        is_tag = Gitlab::Git.tag_ref?(@subject.ref)
+        ref_name = Gitlab::Git.ref_name(@subject.ref)
+      else
+        # NOTE: this block should not be removed
+        # until the full ref validation is in place
+        # and all old refs are updated and validated
+        is_tag = @subject.project.repository.tag_exists?(@subject.ref)
+        ref_name = @subject.ref
+      end
+
+      ref_protected?(@user, @subject.project, is_tag, ref_name)
     end

     condition(:owner_of_schedule) do
@@ -31,6 +42,15 @@
       enable :take_ownership_pipeline_schedule
     end

-    rule { protected_ref }.prevent :play_pipeline_schedule
+    rule { protected_ref }.policy do
+      prevent :play_pipeline_schedule
+      prevent :create_pipeline_schedule
+    end
+
+    private
+
+    def full_ref?(ref)
+      Gitlab::Git.tag_ref?(ref) || Gitlab::Git.branch_ref?(ref)
+    end
   end
 end
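
The policy now only trusts fully qualified refs (refs/heads/... or refs/tags/...); short refs still fall back to asking the repository whether a tag of that name exists. A rough standalone sketch of that branching, with the Gitlab::Git helpers (tag_ref?, branch_ref?, ref_name) reimplemented as plain string checks purely for illustration:

    TAG_PREFIX    = 'refs/tags/'
    BRANCH_PREFIX = 'refs/heads/'

    def full_ref?(ref)
      ref.start_with?(TAG_PREFIX, BRANCH_PREFIX)
    end

    # Returns [is_tag, ref_name]; nil for is_tag means "ask the repository",
    # which is the legacy fallback kept in the policy above.
    def parse(ref)
      if full_ref?(ref)
        [ref.start_with?(TAG_PREFIX), ref.delete_prefix(TAG_PREFIX).delete_prefix(BRANCH_PREFIX)]
      else
        [nil, ref]
      end
    end

    p parse('refs/tags/v1.0.0')  # => [true, "v1.0.0"]
    p parse('refs/heads/master') # => [false, "master"]
    p parse('master')            # => [nil, "master"]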

@@ -49,11 +49,7 @@ module BulkImports
     end

     def validate_symlink
-      raise(BulkImports::Error, 'Invalid file') if symlink?(filepath)
-    end
-
-    def symlink?(filepath)
-      File.lstat(filepath).symlink?
+      raise(BulkImports::Error, 'Invalid file') if Gitlab::Utils::FileInfo.linked?(filepath)
     end

     def extract_archive

@@ -53,7 +53,7 @@ module BulkImports
     end

     def validate_symlink(filepath)
-      raise(ServiceError, 'Invalid file') if File.lstat(filepath).symlink?
+      raise(ServiceError, 'Invalid file') if Gitlab::Utils::FileInfo.linked?(filepath)
     end

     def decompress_file

@@ -171,6 +171,7 @@ module Gitlab
   # - Any parameter containing `password`
   # - Any parameter containing `secret`
   # - Any parameter ending with `key`
+  # - Any parameter named `redirect`, filtered for security concerns of exposing sensitive information
   # - Two-factor tokens (:otp_attempt)
   # - Repo/Project Import URLs (:import_url)
   # - Build traces (:trace)
@@ -213,6 +214,7 @@
     variables
     content
     sharedSecret
+    redirect
   )

   # Enable escaping HTML in JSON.
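
With redirect on the filter list, Rails masks the value wherever filtered parameters are rendered (request logs, exception pages). A small illustration that calls ActiveSupport's parameter filter directly; it needs only the activesupport gem, and the key list here is just the entry added above:

    require 'active_support/parameter_filter'

    filter = ActiveSupport::ParameterFilter.new(%i[redirect])
    filtered = filter.filter(redirect: 'https://evil.example/?token=abc', page: '2')

    p filtered[:redirect] # => "[FILTERED]"
    p filtered[:page]     # => "2"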

@@ -34,8 +34,13 @@ module Banzai
       # https://github.com/vmg/rinku/blob/v2.0.1/ext/rinku/autolink.c#L65
       #
       # Rubular: http://rubular.com/r/nrL3r9yUiq
+      # Note that it's not possible to use Gitlab::UntrustedRegexp for LINK_PATTERN,
+      # as `(?<!` is unsupported in `re2`, see https://github.com/google/re2/wiki/Syntax
       LINK_PATTERN = %r{([a-z][a-z0-9\+\.-]+://[^\s>]+)(?<!\?|!|\.|,|:)}.freeze

+      ENTITY_UNTRUSTED = '((?:&[\w#]+;)+)\z'
+      ENTITY_UNTRUSTED_REGEX = Gitlab::UntrustedRegexp.new(ENTITY_UNTRUSTED, multiline: false)
+
       # Text matching LINK_PATTERN inside these elements will not be linked
       IGNORE_PARENTS = %w(a code kbd pre script style).to_set
@@ -85,10 +90,14 @@
       # Remove any trailing HTML entities and store them for appending
       # outside the link element. The entity must be marked HTML safe in
       # order to be output literally rather than escaped.
-      match.gsub!(/((?:&[\w#]+;)+)\z/, '')
-      dropped = (Regexp.last_match(1) || '').html_safe
+      dropped = ''
+      match = ENTITY_UNTRUSTED_REGEX.replace_gsub(match) do |entities|
+        dropped = entities[1].html_safe
+
+        ''
+      end

-      # To match the behaviour of Rinku, if the matched link ends with a
+      # To match the behavior of Rinku, if the matched link ends with a
       # closing part of a matched pair of punctuation, we remove that trailing
       # character unless there are an equal number of closing and opening
       # characters in the link.
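
The replacement pattern is anchored to the end of the string and, in GitLab, runs through Gitlab::UntrustedRegexp, which is backed by RE2 and therefore matches in linear time; that is what removes the ReDoS vector. A plain-Ruby sketch of the same trailing-entity split, using String#sub in place of replace_gsub purely for illustration:

    ENTITY_UNTRUSTED = /((?:&[\w#]+;)+)\z/

    def split_trailing_entities(match)
      dropped = ''
      stripped = match.sub(ENTITY_UNTRUSTED) do
        dropped = Regexp.last_match(1)
        ''
      end

      [stripped, dropped]
    end

    p split_trailing_entities('http://example.com&gt;&amp;')
    # => ["http://example.com", "&gt;&amp;"]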

@@ -11,7 +11,7 @@ module Banzai
       def call
         return doc unless settings.plantuml_enabled? && doc.at_xpath(lang_tag)

-        plantuml_setup
+        Gitlab::Plantuml.configure

         doc.xpath(lang_tag).each do |node|
           img_tag = Nokogiri::HTML::DocumentFragment.parse(
@@ -38,15 +38,6 @@
       def settings
         Gitlab::CurrentSettings.current_application_settings
       end
-
-      def plantuml_setup
-        Asciidoctor::PlantUml.configure do |conf|
-          conf.url = settings.plantuml_url
-          conf.png_enable = settings.plantuml_enabled
-          conf.svg_enable = false
-          conf.txt_enable = false
-        end
-      end
     end
   end
 end

@@ -24,7 +24,7 @@ module BulkImports
         return if tar_filepath?(file_path)
         return if lfs_json_filepath?(file_path)
         return if File.directory?(file_path)
-        return if File.lstat(file_path).symlink?
+        return if Gitlab::Utils::FileInfo.linked?(file_path)

         size = File.size(file_path)
         oid = LfsObject.calculate_oid(file_path)

@@ -24,7 +24,7 @@ module BulkImports
         # Validate that the path is OK to load
         Gitlab::Utils.check_allowed_absolute_path_and_path_traversal!(file_path, [Dir.tmpdir])
         return if File.directory?(file_path)
-        return if File.lstat(file_path).symlink?
+        return if Gitlab::Utils::FileInfo.linked?(file_path)

         avatar_path = AVATAR_PATTERN.match(file_path)
         return save_avatar(file_path) if avatar_path

@@ -32,7 +32,7 @@ module BulkImports
     end

     def validate_symlink
-      return unless File.lstat(filepath).symlink?
+      return unless Gitlab::Utils::FileInfo.linked?(filepath)

       File.delete(filepath)
       raise_error 'Invalid downloaded file'

@@ -26,7 +26,7 @@
         return unless portable.lfs_enabled?
         return unless File.exist?(bundle_path)
         return if File.directory?(bundle_path)
-        return if File.lstat(bundle_path).symlink?
+        return if Gitlab::Utils::FileInfo.linked?(bundle_path)

         portable.design_repository.create_from_bundle(bundle_path)
       end

@@ -26,7 +26,7 @@
         return unless File.exist?(bundle_path)
         return if File.directory?(bundle_path)
-        return if File.lstat(bundle_path).symlink?
+        return if Gitlab::Utils::FileInfo.linked?(bundle_path)

         portable.repository.create_from_bundle(bundle_path)
       end

@@ -77,20 +77,11 @@ module Gitlab
       context[:pipeline] = :ascii_doc
       context[:max_includes] = [MAX_INCLUDES, context[:max_includes]].compact.min

-      plantuml_setup
+      Gitlab::Plantuml.configure

       html = ::Asciidoctor.convert(input, asciidoc_opts)
       html = Banzai.render(html, context)

       html.html_safe
     end
-
-    def self.plantuml_setup
-      Asciidoctor::PlantUml.configure do |conf|
-        conf.url = Gitlab::CurrentSettings.plantuml_url
-        conf.svg_enable = Gitlab::CurrentSettings.plantuml_enabled
-        conf.png_enable = Gitlab::CurrentSettings.plantuml_enabled
-        conf.txt_enable = false
-      end
-    end
   end
 end

@@ -42,7 +42,7 @@
       def prohibited_branch_checks
         return if deletion?

-        if branch_name =~ %r{\A\h{40}(/|\z)}
+        if branch_name =~ %r{\A\h{40}(-/|/|\z)}
           raise GitAccess::ForbiddenError, ERROR_MESSAGES[:prohibited_hex_branch_name]
         end
       end
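
The only change to the check is the extra -/ alternative, so a 40-character hex name followed by "-/" is now rejected along with the forms that were already blocked. A quick sketch of what the updated pattern accepts and rejects:

    PROHIBITED = %r{\A\h{40}(-/|/|\z)}

    sha = 'a' * 40
    p PROHIBITED.match?(sha)            # => true  (bare 40-hex name, already blocked)
    p PROHIBITED.match?("#{sha}/foo")   # => true  (40-hex path prefix, already blocked)
    p PROHIBITED.match?("#{sha}-/foo")  # => true  (newly blocked: hex plus hyphen before the path)
    p PROHIBITED.match?('feature/foo')  # => false (ordinary branch names are unaffected)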

@@ -65,7 +65,7 @@
       def validate_archive_path
         Gitlab::Utils.check_path_traversal!(archive_path)

-        raise(ServiceError, 'Archive path is a symlink') if File.lstat(archive_path).symlink?
+        raise(ServiceError, 'Archive path is a symlink or hard link') if Gitlab::Utils::FileInfo.linked?(archive_path)
         raise(ServiceError, 'Archive path is not a file') unless File.file?(archive_path)
       end

@@ -25,7 +25,7 @@
           message: 'params invalid'
         }, allow_blank: true
         validates :search, format: {
-          with: /\A([a-z\_]*=[a-zA-Z0-9\- :]*,*)*\z/,
+          with: /\A(name=[a-zA-Z0-9\-:]+(?:,name=[a-zA-Z0-9\-:]+)*)\z/,
           message: 'params invalid'
         }, allow_blank: true
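
The previous pattern accepted arbitrary key=value pairs, including spaces; the stricter one only accepts one or more comma-separated name= terms. A short illustration of the new behavior:

    SEARCH_PARAM = /\A(name=[a-zA-Z0-9\-:]+(?:,name=[a-zA-Z0-9\-:]+)*)\z/

    p SEARCH_PARAM.match?('name=prod')              # => true
    p SEARCH_PARAM.match?('name=prod,name=web:v1')  # => true
    p SEARCH_PARAM.match?('q=anything goes')        # => false (only name= terms are allowed)
    p SEARCH_PARAM.match?('name=prod um')           # => false (spaces are no longer accepted)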

@@ -5,8 +5,11 @@ module Gitlab
     module CommandLineUtil
       UNTAR_MASK = 'u+rwX,go+rX,go-w'
       DEFAULT_DIR_MODE = 0700
+      CLEAN_DIR_IGNORE_FILE_NAMES = %w[. ..].freeze

-      FileOversizedError = Class.new(StandardError)
+      CommandLineUtilError = Class.new(StandardError)
+      FileOversizedError = Class.new(CommandLineUtilError)
+      HardLinkError = Class.new(CommandLineUtilError)

       def tar_czf(archive:, dir:)
         tar_with_options(archive: archive, dir: dir, options: 'czf')
@@ -90,7 +93,7 @@
       def untar_with_options(archive:, dir:, options:)
         execute_cmd(%W(tar -#{options} #{archive} -C #{dir}))
         execute_cmd(%W(chmod -R #{UNTAR_MASK} #{dir}))
-        remove_symlinks(dir)
+        clean_extraction_dir!(dir)
       end

       # rubocop:disable Gitlab/ModuleWithInstanceVariables
@@ -122,17 +125,27 @@
         true
       end

-      def remove_symlinks(dir)
-        ignore_file_names = %w[. ..]
-
+      # Scans and cleans the directory tree.
+      # Symlinks are considered legal but are removed.
+      # Files sharing hard links are considered illegal and the directory will be removed
+      # and a `HardLinkError` exception will be raised.
+      #
+      # @raise [HardLinkError] if there multiple hard links to the same file detected.
+      # @return [Boolean] true
+      def clean_extraction_dir!(dir)
         # Using File::FNM_DOTMATCH to also delete symlinks starting with "."
-        Dir.glob("#{dir}/**/*", File::FNM_DOTMATCH)
-          .reject { |f| ignore_file_names.include?(File.basename(f)) }
-          .each do |filepath|
-            FileUtils.rm(filepath) if File.lstat(filepath).symlink?
-          end
+        Dir.glob("#{dir}/**/*", File::FNM_DOTMATCH).each do |filepath|
+          next if CLEAN_DIR_IGNORE_FILE_NAMES.include?(File.basename(filepath))
+
+          raise HardLinkError, 'File shares hard link' if Gitlab::Utils::FileInfo.shares_hard_link?(filepath)
+
+          FileUtils.rm(filepath) if Gitlab::Utils::FileInfo.linked?(filepath)
+        end

         true
+      rescue HardLinkError
+        FileUtils.remove_dir(dir)
+        raise
       end
     end
   end
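
For reference, the same cleanup idea as a self-contained sketch that relies only on File.lstat (standing in for Gitlab::Utils::FileInfo): symlinks are deleted, while a file that shares a hard link aborts the extraction and removes the whole directory.

    require 'fileutils'

    # Illustrative only; error class and messages are placeholders.
    def clean_dir!(dir)
      Dir.glob("#{dir}/**/*", File::FNM_DOTMATCH).each do |path|
        next if %w[. ..].include?(File.basename(path))

        stat = File.lstat(path)
        raise 'File shares hard link' if stat.file? && stat.nlink > 1

        FileUtils.rm(path) if stat.symlink?
      end

      true
    rescue RuntimeError
      FileUtils.remove_dir(dir)
      raise
    end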

@@ -87,7 +87,7 @@
       def validate_archive_path
         Gitlab::Utils.check_path_traversal!(@archive_path)

-        raise(ServiceError, 'Archive path is a symlink') if File.lstat(@archive_path).symlink?
+        raise(ServiceError, 'Archive path is a symlink or hard link') if Gitlab::Utils::FileInfo.linked?(@archive_path)
         raise(ServiceError, 'Archive path is not a file') unless File.file?(@archive_path)
       end

@@ -23,7 +23,7 @@
         mkdir_p(@shared.export_path)
         mkdir_p(@shared.archive_path)

-        remove_symlinks(@shared.export_path)
+        clean_extraction_dir!(@shared.export_path)

         copy_archive

         wait_for_archived_file do
@@ -35,7 +35,7 @@
         false
       ensure
         remove_import_file
-        remove_symlinks(@shared.export_path)
+        clean_extraction_dir!(@shared.export_path)
       end

       private

@@ -21,7 +21,9 @@
         # This reads from `tree/project.json`
         path = file_path("#{importable_path}.json")

-        raise Gitlab::ImportExport::Error, 'Invalid file' if !File.exist?(path) || File.symlink?(path)
+        if !File.exist?(path) || Gitlab::Utils::FileInfo.linked?(path)
+          raise Gitlab::ImportExport::Error, 'Invalid file'
+        end

         data = File.read(path, MAX_JSON_DOCUMENT_SIZE)
         json_decode(data)
@@ -34,7 +36,7 @@
         # This reads from `tree/project/merge_requests.ndjson`
         path = file_path(importable_path, "#{key}.ndjson")

-        next if !File.exist?(path) || File.symlink?(path)
+        next if !File.exist?(path) || Gitlab::Utils::FileInfo.linked?(path)

         File.foreach(path, MAX_JSON_DOCUMENT_SIZE).with_index do |line, line_num|
           documents << [json_decode(line), line_num]

@@ -57,7 +57,7 @@
         source_child = File.join(source_path, child)
         target_child = File.join(target_path, child)

-        next if File.lstat(source_child).symlink?
+        next if Gitlab::Utils::FileInfo.linked?(source_child)

         if File.directory?(source_child)
           FileUtils.mkdir_p(target_child, mode: DEFAULT_DIR_MODE) unless File.exist?(target_child)

@@ -10,13 +10,12 @@
       def execute
         return if host.blank?

-        gitlab_host = ::Settings.pages.host.downcase.prepend(".")
+        gitlab_host = ::Gitlab.config.pages.host.downcase.prepend(".")

         if host.ends_with?(gitlab_host)
           name = host.delete_suffix(gitlab_host)

-          by_namespace_domain(name) ||
-            by_unique_domain(name)
+          by_unique_domain(name) || by_namespace_domain(name)
         else
           by_custom_domain(host)
         end
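
Because a unique-domain label can now collide with a namespace of the same name, the unique-domain lookup has to win. A sketch of the new ordering, with hypothetical lambdas standing in for the real finders:

    gitlab_host = '.example.io'
    by_unique_domain    = ->(name) { name == 'docs-3f2a' ? :unique_domain_project : nil }
    by_namespace_domain = ->(name) { name == 'docs-3f2a' ? :namespace_with_same_name : nil }

    name = 'docs-3f2a.example.io'.delete_suffix(gitlab_host)

    p by_unique_domain.call(name) || by_namespace_domain.call(name)
    # => :unique_domain_project (the namespace lookup used to run first)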

@@ -130,7 +130,7 @@
     # `NAMESPACE_FORMAT_REGEX`, with the negative lookbehind assertion removed. This means that the client-side validation
     # will pass for usernames ending in `.atom` and `.git`, but will be caught by the server-side validation.
     PATH_START_CHAR = '[a-zA-Z0-9_\.]'
-    PATH_REGEX_STR = PATH_START_CHAR + '[a-zA-Z0-9_\-\.]*'
+    PATH_REGEX_STR = PATH_START_CHAR + '[a-zA-Z0-9_\-\.]' + "{0,#{Namespace::URL_MAX_LENGTH - 1}}"
     NAMESPACE_FORMAT_REGEX_JS = PATH_REGEX_STR + '[a-zA-Z0-9_\-]|[a-zA-Z0-9_]'

     NO_SUFFIX_REGEX = /(?<!\.git|\.atom)/.freeze
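
Bounding the repetition to the maximum namespace length keeps the client-side regex from churning on pathologically long input while still matching every valid path. A sketch with a hypothetical stand-in for Namespace::URL_MAX_LENGTH:

    URL_MAX_LENGTH = 255 # stand-in; the real constant lives on Namespace

    PATH_START_CHAR = '[a-zA-Z0-9_\.]'
    PATH_REGEX_STR  = PATH_START_CHAR + '[a-zA-Z0-9_\-\.]' + "{0,#{URL_MAX_LENGTH - 1}}"
    PATH_REGEX      = Regexp.new("\\A#{PATH_REGEX_STR}\\z")

    p PATH_REGEX.match?('group-name_1.2') # => true
    p PATH_REGEX.match?('a' * 300)        # => false (over the length cap)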

lib/gitlab/plantuml.rb (new file, 20 lines)

@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require "asciidoctor_plantuml/plantuml"
+
+module Gitlab
+  module Plantuml
+    class << self
+      def configure
+        Asciidoctor::PlantUml.configure do |conf|
+          conf.url = Gitlab::CurrentSettings.plantuml_url
+          conf.png_enable = Gitlab::CurrentSettings.plantuml_enabled
+          conf.svg_enable = false
+          conf.txt_enable = false
+
+          conf
+        end
+      end
+    end
+  end
+end
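
The new module gives the Banzai filter and the AsciiDoc pipeline a single configuration path, with SVG output kept disabled, matching the "Relocate PlantUML config and disable SVG support" entry in the changelog above. A standalone usage sketch of the same asciidoctor-plantuml API, with a placeholder server URL instead of Gitlab::CurrentSettings.plantuml_url:

    require 'asciidoctor_plantuml/plantuml'

    Asciidoctor::PlantUml.configure do |conf|
      conf.url        = 'https://plantuml.example.com' # placeholder
      conf.png_enable = true
      conf.svg_enable = false # stays off, per the security change
      conf.txt_enable = false
    end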

@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Gitlab
+  module Utils
+    module FileInfo
+      class << self
+        # Returns true if:
+        # - File or directory is a symlink.
+        # - File shares a hard link.
+        def linked?(file)
+          stat = to_file_stat(file)
+
+          stat.symlink? || shares_hard_link?(stat)
+        end
+
+        # Returns:
+        # - true if file shares a hard link with another file.
+        # - false if file is a directory, as directories cannot be hard linked.
+        def shares_hard_link?(file)
+          stat = to_file_stat(file)
+
+          stat.file? && stat.nlink > 1
+        end
+
+        private
+
+        def to_file_stat(filepath_or_stat)
+          return filepath_or_stat if filepath_or_stat.is_a?(File::Stat)
+
+          File.lstat(filepath_or_stat)
+        end
+      end
+    end
+  end
+end
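
linked? builds on two File::Stat facts: lstat does not follow symlinks, and nlink counts the hard links of a regular file. A short demonstration, assuming a platform where symlinks and hard links can be created:

    require 'tmpdir'

    Dir.mktmpdir do |dir|
      file = File.join(dir, 'data.txt')
      File.write(file, 'payload')

      File.symlink(file, File.join(dir, 'link'))
      File.link(file, File.join(dir, 'copy'))

      p File.lstat(File.join(dir, 'link')).symlink? # => true  (caught by linked?)
      p File.lstat(file).nlink                      # => 2     (caught by shares_hard_link?)
    end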

@@ -4,7 +4,7 @@ require 'jwt'
 module JSONWebToken
   class HMACToken < Token
-    IAT_LEEWAY = 60
+    LEEWAY = 60
     JWT_ALGORITHM = 'HS256'

     def initialize(secret)
@@ -13,7 +13,7 @@ module JSONWebToken
       @secret = secret
     end

-    def self.decode(token, secret, leeway: IAT_LEEWAY, verify_iat: true)
+    def self.decode(token, secret, leeway: LEEWAY, verify_iat: false)
       JWT.decode(token, secret, true, leeway: leeway, verify_iat: verify_iat, algorithm: JWT_ALGORITHM)
     end
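
With verify_iat: false as the new default, clock skew between the token issuer and the verifier no longer invalidates otherwise valid tokens; callers that still want the check can pass verify_iat: true. A sketch with the ruby-jwt gem, where a future-dated iat stands in for skew:

    require 'jwt' # ruby-jwt gem

    secret = 'not-a-real-secret'
    skewed = JWT.encode({ 'iat' => Time.now.to_i + 120, 'sub' => 'demo' }, secret, 'HS256')

    # Verification on: an issued-at in the future raises JWT::InvalidIatError.
    begin
      JWT.decode(skewed, secret, true, algorithm: 'HS256', verify_iat: true, leeway: 60)
    rescue JWT::InvalidIatError => e
      puts "rejected: #{e.message}"
    end

    # Verification off (the default above): the same token decodes normally.
    payload, _header = JWT.decode(skewed, secret, true, algorithm: 'HS256', verify_iat: false, leeway: 60)
    puts payload['sub'] # => demo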

@@ -35713,6 +35713,9 @@ msgstr ""
 msgid "ProjectSettings|With GitLab Pages you can host your static websites on GitLab. GitLab Pages uses a caching mechanism for efficiency. Your changes may not take effect until that cache is invalidated, which usually takes less than a minute."
 msgstr ""

+msgid "ProjectSetting|already in use"
+msgstr ""
+
 msgid "ProjectTemplates|.NET Core"
 msgstr ""
@@ -36007,6 +36010,9 @@
 msgid "ProjectsNew|Your project will be created at:"
 msgstr ""

+msgid "Project|already in use"
+msgstr ""
+
 msgid "PrometheusAlerts|exceeded"
 msgstr ""
@@ -53168,9 +53174,6 @@
 msgid "eligible users"
 msgstr ""

-msgid "email '%{email}' is not a verified email."
-msgstr ""
-
 msgid "email address settings"
 msgstr ""
@@ -53476,6 +53479,9 @@
 msgid "is not valid. The iteration group has to match the iteration cadence group."
 msgstr ""

+msgid "is not verified."
+msgstr ""
+
 msgid "is one of"
 msgstr ""

@@ -59,7 +59,7 @@
     "@gitlab/svgs": "3.46.0",
     "@gitlab/ui": "62.10.0",
     "@gitlab/visual-review-tools": "1.7.3",
-    "@gitlab/web-ide": "0.0.1-dev-20230511143809",
+    "@gitlab/web-ide": "0.0.1-dev-20230713160749-patch-1",
    "@mattiasbuelens/web-streams-adapter": "^0.1.0",
    "@popperjs/core": "^2.11.2",
    "@rails/actioncable": "6.1.4-7",

@@ -4,6 +4,7 @@ require 'spec_helper'
 RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
   include AccessMatchersForController
+  using RSpec::Parameterized::TableSyntax

   let_it_be(:user) { create(:user) }
   let_it_be(:project) { create(:project, :public, :repository) }
@@ -45,6 +46,43 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
     end
   end
+  shared_examples 'protecting ref' do
+    where(:branch_access_levels, :tag_access_level, :maintainer_accessible, :developer_accessible) do
+      [:no_one_can_push, :no_one_can_merge] | :no_one_can_create | \
+        :be_denied_for | :be_denied_for
+      [:maintainers_can_push, :maintainers_can_merge] | :maintainers_can_create | \
+        :be_allowed_for | :be_denied_for
+      [:developers_can_push, :developers_can_merge] | :developers_can_create | \
+        :be_allowed_for | :be_allowed_for
+    end
+
+    with_them do
+      context 'when branch is protected' do
+        let(:ref_prefix) { 'heads' }
+        let(:ref_name) { 'master' }
+
+        before do
+          create(:protected_branch, *branch_access_levels, name: ref_name, project: project)
+        end
+
+        it { expect { go }.to try(maintainer_accessible, :maintainer).of(project) }
+        it { expect { go }.to try(developer_accessible, :developer).of(project) }
+      end
+
+      context 'when tag is protected' do
+        let(:ref_prefix) { 'tags' }
+        let(:ref_name) { 'v1.0.0' }
+
+        before do
+          create(:protected_tag, tag_access_level, name: ref_name, project: project)
+        end
+
+        it { expect { go }.to try(maintainer_accessible, :maintainer).of(project) }
+        it { expect { go }.to try(developer_accessible, :developer).of(project) }
+      end
+    end
+  end
+
   describe 'GET #index' do
     render_views
@@ -158,7 +196,9 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
     end

     describe 'security' do
-      let(:schedule) { attributes_for(:ci_pipeline_schedule) }
+      let(:schedule) { attributes_for(:ci_pipeline_schedule, ref: "refs/#{ref_prefix}/#{ref_name}") }
+      let(:ref_prefix) { 'heads' }
+      let(:ref_name) { "master" }

       it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
         expect { go }.to be_allowed_for(:admin)
@@ -177,6 +217,8 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
       it { expect { go }.to be_denied_for(:user) }
       it { expect { go }.to be_denied_for(:external) }
       it { expect { go }.to be_denied_for(:visitor) }
+
+      it_behaves_like 'protecting ref'
     end

     def go
@@ -427,7 +469,7 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
   end

   describe 'POST #play', :clean_gitlab_redis_rate_limiting do
-    let(:ref) { 'master' }
+    let(:ref_name) { 'master' }

     before do
       project.add_developer(user)
@@ -443,7 +485,7 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
      it 'does not allow pipeline to be executed' do
        expect(RunPipelineScheduleWorker).not_to receive(:perform_async)

-        post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id }
+        go

        expect(response).to have_gitlab_http_status(:not_found)
      end
@@ -453,16 +495,14 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
      it 'executes a new pipeline' do
        expect(RunPipelineScheduleWorker).to receive(:perform_async).with(pipeline_schedule.id, user.id).and_return('job-123')

-        post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id }
+        go

        expect(flash[:notice]).to start_with 'Successfully scheduled a pipeline to run'
        expect(response).to have_gitlab_http_status(:found)
      end

      it 'prevents users from scheduling the same pipeline repeatedly' do
-        2.times do
-          post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id }
-        end
+        2.times { go }

        expect(flash.to_a.size).to eq(2)
        expect(flash[:alert]).to eq _('You cannot play this scheduled pipeline at the moment. Please wait a minute.')
@@ -470,17 +510,14 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continuous_integration do
      end
    end

-    context 'when a developer attempts to schedule a protected ref' do
-      it 'does not allow pipeline to be executed' do
-        create(:protected_branch, project: project, name: ref)
-        protected_schedule = create(:ci_pipeline_schedule, project: project, ref: ref)
-
-        expect(RunPipelineScheduleWorker).not_to receive(:perform_async)
-
-        post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: protected_schedule.id }
-
-        expect(response).to have_gitlab_http_status(:not_found)
-      end
+    describe 'security' do
+      let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, ref: "refs/#{ref_prefix}/#{ref_name}") }
+
+      it_behaves_like 'protecting ref'
+    end
+
+    def go
+      post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id }
    end
  end


@@ -1,294 +1,294 @@
User-Agent: Microsoft-MacOutlook/10.22.0.200209 User-Agent: Microsoft-MacOutlook/10.22.0.200209
Date: Mon, 17 Feb 2020 22:56:47 +0100 Date: Mon, 17 Feb 2020 22:56:47 +0100
Subject: Re: htmltest | test issue (#1) Subject: Re: htmltest | test issue (#1)
From: "Louzan Martinez, Diego (ext) (SI BP R&D ZG)" From: "Louzan Martinez, Diego (ext) (SI BP R&D ZG)"
<diego.louzan.ext@siemens.com> <diego.louzan.ext@siemens.com>
To: Administrator / htmltest To: Administrator / htmltest
<dlouzan.dummy+c034670b1623e617e15a3df64223d363@gmail.com> <dlouzan.dummy+c034670b1623e617e15a3df64223d363@gmail.com>
Message-ID: <012E37D9-2A3F-4AC8-B79A-871F42914D86@siemens.com> Message-ID: <012E37D9-2A3F-4AC8-B79A-871F42914D86@siemens.com>
Thread-Topic: htmltest | test issue (#1) Thread-Topic: htmltest | test issue (#1)
References: <reply-c034670b1623e617e15a3df64223d363@169.254.169.254> References: <reply-c034670b1623e617e15a3df64223d363@169.254.169.254>
<issue_451@169.254.169.254> <issue_451@169.254.169.254>
<note_1797@169.254.169.254> <note_1797@169.254.169.254>
In-Reply-To: <note_1797@169.254.169.254> In-Reply-To: <note_1797@169.254.169.254>
Content-type: multipart/signed; Content-type: multipart/signed;
protocol="application/pkcs7-signature"; protocol="application/pkcs7-signature";
micalg=sha256; micalg=sha256;
boundary="B_3664825007_1904734766" boundary="B_3664825007_1904734766"
MIME-Version: 1.0 MIME-Version: 1.0
--B_3664825007_1904734766 --B_3664825007_1904734766
Content-type: multipart/mixed; Content-type: multipart/mixed;
boundary="B_3664825007_384940722" boundary="B_3664825007_384940722"
--B_3664825007_384940722 --B_3664825007_384940722
Content-type: multipart/alternative; Content-type: multipart/alternative;
boundary="B_3664825007_1519466360" boundary="B_3664825007_1519466360"
--B_3664825007_1519466360 --B_3664825007_1519466360
Content-type: text/plain; Content-type: text/plain;
charset="UTF-8" charset="UTF-8"
Content-transfer-encoding: quoted-printable Content-transfer-encoding: quoted-printable
Me too, with an attachment Me too, with an attachment
=20 =20
From: Administrator <dlouzan.dummy@gmail.com> From: Administrator <dlouzan.dummy@gmail.com>
Reply to: Administrator / htmltest <dlouzan.dummy+c034670b1623e617e15a3df64= Reply to: Administrator / htmltest <dlouzan.dummy+c034670b1623e617e15a3df64=
223d363@gmail.com> 223d363@gmail.com>
Date: Monday, 17 February 2020 at 22:55 Date: Monday, 17 February 2020 at 22:55
To: "Louzan Martinez, Diego (ext) (SOP IT STG XS)" <diego.louzan.ext@siemen= To: "Louzan Martinez, Diego (ext) (SOP IT STG XS)" <diego.louzan.ext@siemen=
s.com> s.com>
Subject: Re: htmltest | test issue (#1) Subject: Re: htmltest | test issue (#1)
=20 =20
Administrator commented:=20 Administrator commented:=20
I pity the foo !!! I pity the foo !!!
=E2=80=94=20 =E2=80=94=20
Reply to this email directly or view it on GitLab.=20 Reply to this email directly or view it on GitLab.=20
You're receiving this email because of your account on 169.254.169.254. If = You're receiving this email because of your account on 169.254.169.254. If =
you'd like to receive fewer emails, you can unsubscribe from this thread or = you'd like to receive fewer emails, you can unsubscribe from this thread or =
adjust your notification settings.=20 adjust your notification settings.=20
--B_3664825007_1519466360 --B_3664825007_1519466360
Content-type: text/html; Content-type: text/html;
charset="UTF-8" charset="UTF-8"
Content-transfer-encoding: quoted-printable Content-transfer-encoding: quoted-printable
<html xmlns:o=3D"urn:schemas-microsoft-com:office:office" xmlns:w=3D"urn:schema= <html xmlns:o=3D"urn:schemas-microsoft-com:office:office" xmlns:w=3D"urn:schema=
s-microsoft-com:office:word" xmlns:m=3D"http://schemas.microsoft.com/office/20= s-microsoft-com:office:word" xmlns:m=3D"http://schemas.microsoft.com/office/20=
04/12/omml" xmlns=3D"http://www.w3.org/TR/REC-html40"><head><meta http-equiv=3DC= 04/12/omml" xmlns=3D"http://www.w3.org/TR/REC-html40"><head><meta http-equiv=3DC=
ontent-Type content=3D"text/html; charset=3Dutf-8"><meta name=3DGenerator content=3D= ontent-Type content=3D"text/html; charset=3Dutf-8"><meta name=3DGenerator content=3D=
"Microsoft Word 15 (filtered medium)"><title>GitLab</title><style><!-- "Microsoft Word 15 (filtered medium)"><title>GitLab</title><style><!--
/* Font Definitions */ /* Font Definitions */
@font-face @font-face
{font-family:"Cambria Math"; {font-family:"Cambria Math";
panose-1:2 4 5 3 5 4 6 3 2 4;} panose-1:2 4 5 3 5 4 6 3 2 4;}
@font-face @font-face
{font-family:Calibri; {font-family:Calibri;
panose-1:2 15 5 2 2 2 4 3 2 4;} panose-1:2 15 5 2 2 2 4 3 2 4;}
/* Style Definitions */ /* Style Definitions */
p.MsoNormal, li.MsoNormal, div.MsoNormal p.MsoNormal, li.MsoNormal, div.MsoNormal
{margin:0cm; {margin:0cm;
margin-bottom:.0001pt; margin-bottom:.0001pt;
font-size:11.0pt; font-size:11.0pt;
font-family:"Calibri",sans-serif;} font-family:"Calibri",sans-serif;}
a:link, span.MsoHyperlink a:link, span.MsoHyperlink
{mso-style-priority:99; {mso-style-priority:99;
color:blue; color:blue;
text-decoration:underline;} text-decoration:underline;}
span.EmailStyle19 span.EmailStyle19
{mso-style-type:personal-reply; {mso-style-type:personal-reply;
font-family:"Calibri",sans-serif; font-family:"Calibri",sans-serif;
color:windowtext;} color:windowtext;}
.MsoChpDefault .MsoChpDefault
{mso-style-type:export-only; {mso-style-type:export-only;
font-size:10.0pt;} font-size:10.0pt;}
@page WordSection1 @page WordSection1
{size:612.0pt 792.0pt; {size:612.0pt 792.0pt;
margin:72.0pt 72.0pt 72.0pt 72.0pt;} margin:72.0pt 72.0pt 72.0pt 72.0pt;}
div.WordSection1 div.WordSection1
{page:WordSection1;} {page:WordSection1;}
--></style></head><body lang=3Den-ES link=3Dblue vlink=3Dpurple><div class=3DWordSe= --></style></head><body lang=3Den-ES link=3Dblue vlink=3Dpurple><div class=3DWordSe=
ction1><p class=3DMsoNormal><span lang=3DEN-US style=3D'mso-fareast-language:EN-US= ction1><p class=3DMsoNormal><span lang=3DEN-US style=3D'mso-fareast-language:EN-US=
'>Me too, with an attachment<o:p></o:p></span></p><p class=3DMsoNormal><span s= '>Me too, with an attachment<o:p></o:p></span></p><p class=3DMsoNormal><span s=
tyle=3D'mso-fareast-language:EN-US'><o:p>&nbsp;</o:p></span></p><div style=3D'bo= tyle=3D'mso-fareast-language:EN-US'><o:p>&nbsp;</o:p></span></p><div style=3D'bo=
rder:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm'><p class= rder:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm'><p class=
=3DMsoNormal><b><span style=3D'font-size:12.0pt;color:black'>From: </span></b><s= =3DMsoNormal><b><span style=3D'font-size:12.0pt;color:black'>From: </span></b><s=
pan style=3D'font-size:12.0pt;color:black'>Administrator &lt;dlouzan.dummy@gma= pan style=3D'font-size:12.0pt;color:black'>Administrator &lt;dlouzan.dummy@gma=
il.com&gt;<br><b>Reply to: </b>Administrator / htmltest &lt;dlouzan.dummy+c0= il.com&gt;<br><b>Reply to: </b>Administrator / htmltest &lt;dlouzan.dummy+c0=
34670b1623e617e15a3df64223d363@gmail.com&gt;<br><b>Date: </b>Monday, 17 Febr= 34670b1623e617e15a3df64223d363@gmail.com&gt;<br><b>Date: </b>Monday, 17 Febr=
uary 2020 at 22:55<br><b>To: </b>&quot;Louzan Martinez, Diego (ext) (SOP IT = uary 2020 at 22:55<br><b>To: </b>&quot;Louzan Martinez, Diego (ext) (SOP IT =
STG XS)&quot; &lt;diego.louzan.ext@siemens.com&gt;<br><b>Subject: </b>Re: ht= STG XS)&quot; &lt;diego.louzan.ext@siemens.com&gt;<br><b>Subject: </b>Re: ht=
mltest | test issue (#1)<o:p></o:p></span></p></div><div><p class=3DMsoNormal>= mltest | test issue (#1)<o:p></o:p></span></p></div><div><p class=3DMsoNormal>=
<o:p>&nbsp;</o:p></p></div><div><p><span style=3D'color:#777777'><a href=3D"http= <o:p>&nbsp;</o:p></p></div><div><p><span style=3D'color:#777777'><a href=3D"http=
://localhost:3000/root">Administrator</a> commented: <o:p></o:p></span></p><= ://localhost:3000/root">Administrator</a> commented: <o:p></o:p></span></p><=
div><p>I pity the foo !!!<o:p></o:p></p></div></div><div style=3D'margin-top:7= div><p>I pity the foo !!!<o:p></o:p></p></div></div><div style=3D'margin-top:7=
.5pt'><p><span style=3D'font-size:12.0pt;color:#777777'>=E2=80=94 <br>Reply to this = .5pt'><p><span style=3D'font-size:12.0pt;color:#777777'>=E2=80=94 <br>Reply to this =
email directly or <a href=3D"http://localhost:3000/root/htmltest/issues/1#note= email directly or <a href=3D"http://localhost:3000/root/htmltest/issues/1#note=
_1797">view it on GitLab</a>. <br>You're receiving this email because of you= _1797">view it on GitLab</a>. <br>You're receiving this email because of you=
r account on 169.254.169.254. If you'd like to receive fewer emails, you can= r account on 169.254.169.254. If you'd like to receive fewer emails, you can=
<a href=3D"http://localhost:3000/sent_notifications/c034670b1623e617e15a3df64= <a href=3D"http://localhost:3000/sent_notifications/c034670b1623e617e15a3df64=
223d363/unsubscribe">unsubscribe</a> from this thread or adjust your notific= 223d363/unsubscribe">unsubscribe</a> from this thread or adjust your notific=
ation settings. <o:p></o:p></span></p></div></div></body></html> ation settings. <o:p></o:p></span></p></div></div></body></html>
--B_3664825007_1519466360-- --B_3664825007_1519466360--
--B_3664825007_384940722 --B_3664825007_384940722
Content-type: image/png; name="gitlab_logo.png"; Content-type: image/png; name="gitlab_logo.png";
x-mac-creator="4F50494D"; x-mac-creator="4F50494D";
x-mac-type="504E4766" x-mac-type="504E4766"
Content-disposition: attachment; Content-disposition: attachment;
filename="gitlab_logo.png" filename="gitlab_logo.png"
Content-transfer-encoding: base64 Content-transfer-encoding: base64
iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAIAAABMXPacAAAABnRSTlMA/wD/AP83WBt9AAAN iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAIAAABMXPacAAAABnRSTlMA/wD/AP83WBt9AAAN
1UlEQVR4AexcZXPjSBTcXxOTvMy7xxfGZWaGaJmZmZmZmZmZmdnMzB7JNwv1qs6VOJY0tuWU 1UlEQVR4AexcZXPjSBTcXxOTvMy7xxfGZWaGaJmZmZmZmZmZmdnMzB7JNwv1qs6VOJY0tuWU
p/rz5PW0q0f99JQakcxK6eItQGZlBMgIkFkZATICZFZGgIwAmZURICMAshitiybrexXblk5D p/rz5PW0q0f99JQakcxK6eItQGZlBMgIkFkZATICZFZGgIwAmZURICMAshitiybrexXblk5D
NnOk2i3G6bCvmYcJWuaMCevVohPAsWGx6h/Zd/wrd2xbWf0EcB3YqsqmfnK0LZseYZCIBEBW NnOk2i3G6bCvmYcJWuaMCevVohPAsWGx6h/Zd/wrd2xbWf0EcB3YqsqmfnK0LZseYZCIBEBW
E/5p4Mp+wtCvJWO3Vqufv8dtHNoZCOo6ZYd1ahEJ4LtzRZ1fC+pTF9T1P7hZnQQIvHqiKW0I E/5p4Mp+wtCvJWO3Vqufv8dtHNoZCOo6ZYd1ahEJ4LtzRZ1fC+pTF9T1P7hZnQQIvHqiKW0I
BFU5lPfiCREJYFs5C4r7Cfu6BdVJAOeutVEErfPGRRhGFAIgu1Xft0VUfYaBbRmXI1ItFuvz BFU5lPfiCREJYFs5C4r7Cfu6BdVJAOeutVEErfPGRRhGFAIgu1Xft0VUfYaBbRmXI1ItFuvz
Gkd0jyKo65oXNupEIYD//g11QZ2o+tRF9QJP7lUPAYJvX2haNIkmmKv0Xj0rCgHsa+dDWRgA Gkd0jyKo65oXNupEIYD//g11QZ2o+tRF9QJP7lUPAYJvX2haNIkmmKv0Xj0rCgHsa+dDWRgA
x+al1eT5Z9+mCglaF02KsGyKBWCcdsOA1hXWZ6A7MB5X2vtPwG8a07tCgvoehchsSLEA/sd3 x+al1eT5Z9+mCglaF02KsGyKBWCcdsOA1hXWZ6A7MB5X2vtPwG8a07tCgvoehchsSLEA/sd3
sNtUWJ+mpEHgxaN0FyD08Y2mVbMKCarzavluXkyxAI5NS3AplcG5fVXa+8+h7TEI4kSWSgEY sNtUWJ+mpEHgxaN0FyD08Y2mVbMKCarzavluXkyxAI5NS3AplcG5fVXa+8+h7TEI4kSWSgEY
t9NQ3j5GfcZhXRivJ439JxgwT+gfg6C+dymymlMmQOD5Q01xgxj1acoaBV8/S2P/+fJe2+b3 t9NQ3j5GfcZhXRivJ439JxgwT+gfg6C+dymymlMmQOD5Q01xgxj1acoaBV8/S2P/+fJe2+b3
GATV+bV9d6+lTADc88FFxIZz9/r0FcB9fE+VBO2r56RGAMYL7ZFYMI3qwfp9aek/oZB5Snks GATV+bV9d6+lTADc88FFxIZz9/r0FcB9fE+VBO2r56RGAMYL7ZFYMI3qwfp9aek/oZB5Snks
dtD4cthSIEDw1VNNaaMq69O0bBp8/yot/Uf1Wdv+zyoJqgvr+h/eSoEAzl3roIjYcB3Yko4C dtD4cthSIEDw1VNNaaMq69O0bBp8/yot/Uf1Wdv+zyoJqgvr+h/eSoEAzl3roIjYcB3Yko4C
eE4fxK31eAja1y9MogDQHhnZPU4BTGP74jiTZv6DwpYZw+MkaBgEja9kCRB89xLaI1VC27p5 eE4fxK31eAja1y9MogDQHhnZPU4BTGP74jiTZv6DwpYZw+MkaBgEja9kCRB89xLaI1VC27p5
6NPb9BIgrP2m6/hP1eyg8fX0XlIFcO3fHE9lAPeRnWnmP+ePqbIV8RN0bF6WHAGgPdKHkwDm 6NPb9BIgrP2m6/hP1eyg8fX0XlIFcO3fHE9lAPeRnWnmP+ePqbIV8RN0bF6WHAGgPdKHkwDm
iQPZUDB9XoAhy5zRnAga6Y78Gl81SLVHYkPb9o/Q149p4z96ja5LDieCmpKG0PhKuACuwzvi iQPZUDB9XoAhy5zRnAga6Y78Gl81SLVHYkPb9o/Q149p4z96ja5LDieCmpKG0PhKuACuwzvi
rwze1LtP7EsXAbyXT6lylFw5OnesTrQA0B4ZwLU4DPPUIWw4lA4PQIx1wQQeBI3Du7JeT8IF rwze1LtP7EsXAbyXT6lylFw5OnesTrQA0B4ZwLU4DPPUIWw4lA4PQIx1wQQeBI3Du7JeT8IF
CH35AO0RTtC2/yus/hIR/UImva5bPg+CmrLGwTfPEi6A+/heiCfckK3wnD0sfgF818+rc2ty CH35AO0RTtC2/yus/hIR/UImva5bPg+CmrLGwTfPEi6A+/heiCfckK3wnD0sfgF818+rc2ty
ogZw7tmQWAHYMG6P0FzLAlhmjoggJG7/YW1LpvImaBrVk2vjqwb39shfvOvTdfo3rFOJ2n8s ogZw7tmQWAHYMG6P0FzLAlhmjoggJG7/YW1LpvImaBrVk2vjqwb39shfvOvTdfo3rFOJ2n8s
Jn3PYn7soPGVQAE8Zw6B//BBNp5nOi5q/7l9GSbM+AFPMCZKAGiPCIF13liYZxLhsq2YJZCg Jn3PYn7soPGVQAE8Zw6B//BBNp5nOi5q/7l9GSbM+AFPMCZKAGiPCIF13liYZxLhsq2YJZCg
aVxfNhggLgC0R/7lXxzMMxm0IvUfu0Xfp0wAO2h8vUuIAJ4L0B7hD3UOnmc6I04BYMJMINxH aVxfNhggLgC0R/7lXxzMMxm0IvUfu0Xfp0wAO2h8vUuIAJ4L0B7hD3UOnmc6I04BYMJMINxH
d5EVANojY/jWRH6eifyCCTPBME8aBI0vYgKEDbg9kkukPphnEtWCCTPhgMYXSQG8V05De0Qg d5EVANojY/jWRH6eifyCCTPBME8aBI0vYgKEDbg9kkukPphnEtWCCTPhgMYXSQG8V05De0Qg
1Hk1YZ5JFAsmzArrCWUHja+T+4kKwLLWhRPJFAfzTCJbjo2LCRI0T8ONrzAJAaA90r2AYH36 1Hk1YZ5JFAsmzArrCWUHja+T+4kKwLLWhRPJFAfzTCJbjo2LCRI0T8ONrzAJAaA90r2AYH36
3iUwz5TiBRNmg9sTJKjt8HdY/ZWYAL4bvNsjMeaZropHgMDzB5ri+gQJQuOLiACsbSm0R4jB 3iUwz5TiBRNmg9sTJKjt8HdY/ZWYAL4bvNsjMeaZropHgMDzB5ri+gQJQuOLiACsbSm0R4jB
vmqOiPxn6wriBC2zRkYQIiAAfIBHFnr4kE9kH+CRAIcP+Wpw/QCPBGCe6aYYP8AjBfiQj78A vmqOiPxn6wriBC2zRkYQIiAAfIBHFnr4kE9kH+CRAIcP+Wpw/QCPBGCe6aYYP8AjBfiQj78A
0B75W5YIiORDPufOtQkiaJkLH/LxFYB1W22j2xjL5MaWSsIoU9iGt/LfuYQbAKnEvau2cZ0S 0B75W5YIiORDPufOtQkiaJkLH/LxFYB1W22j2xjL5MaWSsIoU9iGt/LfuYQbAKnEvau2cZ0S
RNBKFzE2vTABtNfDKxqEh8jC5VLyoBWmdnVVubXUeamBKremsXXdULkiIezwoS2uy349I0gA RNBKFzE2vTABtNfDKxqEh8jC5VLyoBWmdnVVubXUeamBKremsXXdULkiIezwoS2uy349I0gA
5uFctD0LzaFQuQSVZxEGneXoitM1vGBIAeydlYgGakQxk0Lbspg7EyIsy1eAgJ051RLtyEJb 5uFctD0LzaFQuQSVZxEGneXoitM1vGBIAeydlYgGakQxk0Lbspg7EyIsy1eAgJ051RLtyEJb
ZWiyAg0mX6W/P6XJU6Tq9NW5Cl9fCtGkeeGDmqBAW+Tfj+5YXsRr4CkAq7+N9tT+vsvOLLRB ZWiyAg0mX6W/P6XJU6Tq9NW5Cl9fCtGkeeGDmqBAW+Tfj+5YXsRr4CkAq7+N9tT+vsvOLLRB
gcbIiWsQLpdhu1T9nRoBDKXK0GAZ+d/+KBlap8CH9v3odilY1QWeAjBPFuEtMH5psJJCw6Sk gcbIiWsQLpdhu1T9nRoBDKXK0GAZ+d/+KBlap8CH9v3odilY1QWeAjBPFuEtMH5psJJCw6Sk
XUji6FozVS5k61STvP8MlaLlFNopgaNj7k3lJUDQyZxp82MLgAQtpAhXTKfMhdQ5Ci95/5Gg XUji6FozVS5k61STvP8MlaLlFNopgaNj7k3lJUDQyZxp82MLgAQtpAhXTKfMhdQ5Ci95/5Gg
eRTaIf3fuZ0oivhMnAVgjffR3rq/tgBsl6EZFHEXMpSlwIX0JeT8B6x/Kr54ZdGHtlvJaq5w eRTaIf3fuZ0oivhMnAVgjffR3rq/tgBsl6EZFHEXMpSlwIX0JeT8B6x/Kr54ZdGHtlvJaq5w
FoB5tvx/u4ARbZaj8UQvZFpi71wzBf7TkZD/wOmPlaONv6w/CsyDWRwFCLmZcx2iNwIN1lJo FoB5tvx/u4ARbZaj8UQvZFpi71wzBf7TkZD/wOmPlaONv6w/CsyDWRwFCLmZcx2iNwIN1lJo
pIygC/n6UfiBJNn+04eo/wyXodUUnH4UmFOlEb+VgwCs6THaVz96IwC+YZZSaCixCzmUdBfS pIygC/n6UfiBJNn+04eo/wyXodUUnH4UmFOlEb+VgwCs6THaVz96IwC+YZZSaCixCzmUdBfS
F2P/kRM7/SEStBgu3oqwpxaru8lBAObFmkr2AkghnaWjC1k7EPQfyffMtV0a+8SYR/PjFiDs F2P/kRM7/SEStBgu3oqwpxaru8lBAObFmkr2AkghnaWjC1k7EPQfyffMtV0a+8SYR/PjFiDs
ZS50jb3dr3Q2RfBlAC7Ul8K2kCT/yVZ4euMATMj6J/7KXLHBnG6Fg21cArCW52h/w9jbEU9n ZS50jb3dr3Q2RfBlAC7Ul8K2kCT/yVZ4euMATMj6J/7KXLHBnG6Fg21cArCW52h/w9jbEU9n
+IFEX6pMjgC6YmVwkJxQ5pKj9XDxxsSe2qzhbnwCvNpY9XagwSoK3z9EXMjWMSku9LfM2h78 +IFEX6pMjgC6YmVwkJxQ5pKj9XDxxsSe2qzhbnwCvNpY9XagwSoK3z9EXMjWMSku9LfM2h78
h3Dmig3myZI4BAj7mYs9q9yLfDqjs7x9kuFC6my5pxcJ/6GjM1eVYM62iwRdVQjA2t6gA405 h3Dmig3myZI4BAj7mYs9q9yLfDqjs7x9kuFC6my5pxcJ/6GjM1eVYM62iwRdVQjA2t6gA405
CEAuneHHEhyOEu4/RRQR/4HMxQF767LGh1UJ8GY7t00hnU0QfCHTEmuiXQi/pWoH/iMsc20C CEAuneHHEhyOEu4/RRQR/4HMxQF767LGh1UJ8GY7t00hnU0QfCHTEmuiXQi/pWoH/iMsc20C
6+cA5vmqmAIgP3OlP8dNIZ0phKYzOsvTR6nmMP/La2ZNuP+MgMzFGcz5zpGQq1IBWOsrdLA5 6+cA5vmqmAIgP3OlP8dNIZ0phKYzOsvTR6nmMP/La2ZNuP+MgMzFGcz5zpGQq1IBWOsrdLA5
530hnS0TkM7AhYqVCfSfQuw/ClKZiw/2N2QN9ysVgHm5Hu2EW4UHpGiusHRGS3BEgkhM3H/M 530hnS0TkM7AhYqVCfSfQuw/ClKZiw/2N2QN9ysVgHm5Hu2EW4UHpGiusHRGS3BEgkhM3H/M
bbH/SAVlrlmQuXiCebygcgHOdeSxI5l0Bi7UG7uQPEH+4+oJ/kMoc/HAiaJKBYh+/uF3GWwU bbH/SAVlrlmQuXiCebygcgHOdeSxI5l0Bi7UG7uQPEH+4+oJ/kMoc/HAiaJKBYh+/uF3GWwU
lM7wIwp+UEmEANoCKjBQQThz8cBuZeUCHPqdx46E0xktsbQj6kLgP214+Q9krhX8rT/qYbRy lM7wIwp+UEmEANoCKjBQQThz8cBuZeUCHPqdx46E0xktsbQj6kLgP214+Q9krhX8rT/qYbRy
C7oxXOjukM4W8U1ndBZ+UFFly8n7Tw++/oOJzIfMJRTMpd6VCsBanqFjuWQ0wDfVTIq/CxVS C7oxXOjukM4W8U1ndBZ+UFFly8n7Tw++/oOJzIfMJRTMpd6VCsBanqFjuWQ0wDfVTIq/CxVS
IvKfaZC5BOPwn6z+Tswgpr+DTpaS+WNb+KYzWkrWhfBWptY18bAUn4t3HM5cckHWDzieD+8m IvKfaZC5BOPwn6z+Tswgpr+DTpaS+WNb+KYzWkrWhfBWptY18bAUn4t3HM5cckHWDzieD+8m
Y7ajXd+Ym6PQLorAZbCOYzoDF+qpxKZB0H+c3fEFwCtzraEInP4uOXOtnHV8iPuVZNiLexI8 Y7ajXd+Ym6PQLorAZbCOYzoDF+qpxKZB0H+c3fEFwCtzraEInP4uOXOtnHV8iPuVZNiLexI8
QhmpdBYcqNCScyFNPhUYoOCeuaRoCYmLd39j9uW6SMjNdS6IZY0PfiQDgRVI0Tzu6YyWmtsI QhmpdBYcqNCScyFNPhUYoOCeuaRoCYmLd39j9uW6SMjNdS6IZY0PfiQDgRVI0Tzu6YyWmtsI
diHwn1ZK7v4jQbMFZS54D/P9ZSTL8B1P9xmZBzN+zcfxxjbZ997hYG4u5OpByoXkzm5KRHO0 diHwn1ZK7v4jQbMFZS54D/P9ZSTL8B1P9xmZBzN+zcfxxjbZ997hYG4u5OpByoXkzm5KRHO0
/kmCM9du5ffBUI9W8CdKTJD9fBQd/VdoOhvLLZ0FsAsVUAT8J4/y9+foP6MFZ67Df7Dv90aQ /kmCM9du5ffBUI9W8CdKTJD9fBQd/VdoOhvLLZ0FsAsVUAT8J4/y9+foP6MFZ67Df7Dv90aQ
n8AHGvCegLncD+2U8ddgNdd0JjW3FuxCf+PZU+w/XP7uMGGZa6eUudCNNT9NwL+rCTq+T2vt n8AHGvCegLncD+2U8ddgNdd0JjW3FuxCf+PZU+w/XP7uMGGZa6eUudCNNT9NwL+rCTq+T2vt
ayAonQ2RcHCh7sJdSI5nTxGd8MwFKff79IPfkrB/WcYiVn0ZnSxJTjrDjy7afEqY/yjw7Cmi ayAonQ2RcHCh7sJdSI5nTxGd8MwFKff79IPfkrB/WcYiVn0ZnSxJTjrDjy7afEqY/yjw7Cmi
k5K5juex/7V3Dz5yhVEUwP+cce2GjWu7cW3btm03qm27QRXVtt2ZbO8op/r2vp7qS+a+uHHP k5K5juex/7V3Dz5yhVEUwP+cce2GjWu7cW3btm03qm27QRXVtt2ZbO8op/r2vp7qS+a+uHHP
5r7z252ze2N7UUrZZxMB0FBw6GxQUJ1JdXlEXSHcn3oB7g/MFSPN5a75fyEAQGG5QIHUWe9I 5r7z252ze2N7UUrZZxMB0FBw6GxQUJ1JdXlEXSHcn3oB7g/MFSPN5a75fyEAQGG5QIHUWe9I
wCskBYa4Qrg/rfADSNZces1Poeb/swAoKEBnM4Lq7H372B32Ct2RAUxb3B/KXHzN/wcBcFCA wCskBYa4Qrg/rfADSNZces1Poeb/swAoKEBnM4Lq7H372B32Ct2RAUxb3B/KXHzN/wcBcFCA
zor92sQVIic01eTzprg/pLn0mn/Hgz/mKVC4moECobMgV4gd8snnTfWM5fTL/G1ZlK75HgTA zor92sQVIic01eTzprg/pLn0mn/Hgz/mKVC4moECobMgV4gd8snnTfWM5fTL/G1ZlK75HgTA
QUGu7eJAOhNG6RMaboDXKWOuhTAXUfM9CICGAnTGD/m4AR7MNQunn6j5HgTAQgEv5CnQGTHk QUGu7eJAOhNG6RMaboDXKWOuhTAXUfM9CICGAnTGD/m4AR7MNQunn6j5HgTAQgEv5CnQGTHk
IwZ4MNfE+C80iE2o+Z4GgBTSUOgFKKg6G41vl5JDPmKANyKAuVDzO6HmexAAAQVSZxjy1cMV IwZ4MNfE+C80iE2o+Z4GgBTSUOgFKKg6G41vl5JDPmKANyKAuVDzO6HmexAAAQVSZxjy1cMV
ogd4OP0yc1uimgs1Hx9n8zIAHgp4GSwQnUWZCQ0xwBNzzYO5yJrvfwCAwmmBQklGZ8SQDwM8 ogd4OP0yc1uimgs1Hx9n8zIAHgp4GSwQnUWZCQ0xwBNzzYO5yJrvfwCAwmmBQklGZ8SQDwM8
t7mm4cVL1HzvA+ChEE5OcOoMc2JqgAdzjcU3O4ma70EAPBQup/a3cUEBOhse168QMcCDuSLB t7mm4cVL1HzvA+ChEE5OcOoMc2JqgAdzjcU3O4ma70EAPBQup/a3cUEBOhse168QMcCDuSLB
aj7xu329CICHAnTWHzrThnz6AA//+30VcxE1388AeChAZz0jxJAPAzynuYia738AxPPqRgYK aj7xu329CICHAnTWHzrThnz6AA//+30VcxE1388AeChAZz0jxJAPAzynuYia738AxPPqRgYK
sWJ1Fv7xCgmvlAHMtwM8mGsSzKXW/AIIQIUCdKYP+fQBnkzYVkQcNb8ian5hBQAoNMPX5nc6 sWJ1Fv7xCgmvlAHMtwM8mGsSzKXW/AIIQIUCdKYP+fQBnkzYVkQcNb8ian5hBQAoNMPX5nc6
Gwyd6UM+DPB0cyk1vwACUKAAnfWJ6kO+YgZ4vcRcePHqNb9gAlCggJfBTPyaLveQzzHA6wZz Gwyd6UM+DPB0cyk1vwACUKAAnfWJ6kO+YgZ4vcRcePHqNb9gAlCggJfBTPyaLveQzzHA6wZz
OWu+BaBAATpThnx3McBzmctR8y0ABQrQmXvIhwGe21zrSqfOjUfNtwB0KEBnUegsN+SLOQd4 OWu+BaBAATpThnx3McBzmctR8y0ABQrQmXvIhwGe21zrSqfOjUfNtwB0KEBnUegsN+SLOQd4
MJde8y0ARwqAQj6DudBZZsiXcA5gekSSs2EureZbAAoUquKFPDWns++HfBjgwVyo+RfmoeZb MJde8y0ARwqAQj6DudBZZsiXcA5gekSSs2EureZbAAoUquKFPDWns++HfBjgwVyo+RfmoeZb
ADQUcjobk9HZN0M+DPBgLtT8I0TNtwDcUFiW0dm3Qz7cn4E5c2Vq/gCm5lsAChSgs+wVwgAP ADQUcjobk9HZN0M+DPBgLtT8I0TNtwDcUFiW0dm3Qz7cn4E5c2Vq/gCm5lsAChSgs+wVwgAP
5krX/LV8zbcAFCisjiRnxpI9wrkhX3qAlxCsibnYD+1YAAQUJkQ/dozL8ZEBzIf28eTYaHJt 5krX/LV8zbcAFCisjiRnxpI9wrkhX3qAlxCsibnYD+1YAAQUJkQ/dozL8ZEBzIf28eTYaHJt
Ga7mWwAEFPalNtdNDo89bphIfwBdzLWhBlnzLQD+JwoH+7/qVvFlpwqpPT34mm8B8M/n15+P Ga7mWwAEFPalNtdNDo89bphIfwBdzLWhBlnzLQD+JwoH+7/qVvFlpwqpPT34mm8B8M/n15+P
Lf90cGHRpxf4RwvAHt8DsMcCsADssQAsAHssAAvAni8AV5380akCdgAAAABJRU5ErkJggg== Lf90cGHRpxf4RwvAHt8DsMcCsADssQAsAHssAAvAni8AV5380akCdgAAAABJRU5ErkJggg==
--B_3664825007_384940722-- --B_3664825007_384940722--
--B_3664825007_1904734766 --B_3664825007_1904734766
Content-type: application/pkcs7-signature; name="smime.p7s" Content-type: application/pkcs7-signature; name="smime.p7s"
Content-transfer-encoding: base64 Content-transfer-encoding: base64
Content-disposition: attachment; Content-disposition: attachment;
filename="smime.p7s" filename="smime.p7s"
MIIRpwYJKoZIhvcNAQcCoIIRmDCCEZQCAQExDzANBglghkgBZQMEAgEFADALBgkqhkiG9w0B MIIRpwYJKoZIhvcNAQcCoIIRmDCCEZQCAQExDzANBglghkgBZQMEAgEFADALBgkqhkiG9w0B
BwGggg8VMIIHojCCBYqgAwIBAgIEZ5a6PTANBgkqhkiG9w0BAQsFADCBtjELMAkGA1UEBhMC BwGggg8VMIIHojCCBYqgAwIBAgIEZ5a6PTANBgkqhkiG9w0BAQsFADCBtjELMAkGA1UEBhMC
REUxDzANBgNVBAgMBkJheWVybjERMA8GA1UEBwwITXVlbmNoZW4xEDAOBgNVBAoMB1NpZW1l REUxDzANBgNVBAgMBkJheWVybjERMA8GA1UEBwwITXVlbmNoZW4xEDAOBgNVBAoMB1NpZW1l
bnMxETAPBgNVBAUTCFpaWlpaWkE2MR0wGwYDVQQLDBRTaWVtZW5zIFRydXN0IENlbnRlcjE/ bnMxETAPBgNVBAUTCFpaWlpaWkE2MR0wGwYDVQQLDBRTaWVtZW5zIFRydXN0IENlbnRlcjE/
MD0GA1UEAww2U2llbWVucyBJc3N1aW5nIENBIE1lZGl1bSBTdHJlbmd0aCBBdXRoZW50aWNh MD0GA1UEAww2U2llbWVucyBJc3N1aW5nIENBIE1lZGl1bSBTdHJlbmd0aCBBdXRoZW50aWNh
dGlvbiAyMDE2MB4XDTE5MTEyMTE0NDQ0N1oXDTIwMTEyMTE0NDQ0N1owdzERMA8GA1UEBRMI dGlvbiAyMDE2MB4XDTE5MTEyMTE0NDQ0N1oXDTIwMTEyMTE0NDQ0N1owdzERMA8GA1UEBRMI
WjAwM0gwOFQxDjAMBgNVBCoMBURpZWdvMRgwFgYDVQQEDA9Mb3V6YW4gTWFydGluZXoxGDAW WjAwM0gwOFQxDjAMBgNVBCoMBURpZWdvMRgwFgYDVQQEDA9Mb3V6YW4gTWFydGluZXoxGDAW
BgNVBAoMD1NpZW1lbnMtUGFydG5lcjEeMBwGA1UEAwwVTG91emFuIE1hcnRpbmV6IERpZWdv BgNVBAoMD1NpZW1lbnMtUGFydG5lcjEeMBwGA1UEAwwVTG91emFuIE1hcnRpbmV6IERpZWdv
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuInpNaC7NRYD+0pOpHDz2pk9xmPt MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuInpNaC7NRYD+0pOpHDz2pk9xmPt
JGj860SF6Nmn6Eu9EMYKEDfneC6z5QcH+mPS2d0VWgqVVGbRXSPsxJtbi9TCWjQUZdHglEZK JGj860SF6Nmn6Eu9EMYKEDfneC6z5QcH+mPS2d0VWgqVVGbRXSPsxJtbi9TCWjQUZdHglEZK
z9zxoFDh2dvW5/+TOT5Jf78FXyqak0YtY6+oMjQ/i9RUqPL7sIlyXLrBYrILzQ9Afo+7bXZg z9zxoFDh2dvW5/+TOT5Jf78FXyqak0YtY6+oMjQ/i9RUqPL7sIlyXLrBYrILzQ9Afo+7bXZg
v3ypp6xtqAV2ctHzQWFi0onJzxLVYguiVb7fFF9rBEMvSZonuw5tvOwJIhbe5FDFOrDcfbyU v3ypp6xtqAV2ctHzQWFi0onJzxLVYguiVb7fFF9rBEMvSZonuw5tvOwJIhbe5FDFOrDcfbyU
ofZ/wikIZ+A+CE5GryXuuQmGxJaC2QqOkRAWQDzLDx9nG+rKiEs5OvlfEZC7EV1PyjZ93coM ofZ/wikIZ+A+CE5GryXuuQmGxJaC2QqOkRAWQDzLDx9nG+rKiEs5OvlfEZC7EV1PyjZ93coM
faCVdlAgcFZ5fvd37CjyjKl+1QIDAQABo4IC9DCCAvAwggEEBggrBgEFBQcBAQSB9zCB9DAy faCVdlAgcFZ5fvd37CjyjKl+1QIDAQABo4IC9DCCAvAwggEEBggrBgEFBQcBAQSB9zCB9DAy
BggrBgEFBQcwAoYmaHR0cDovL2FoLnNpZW1lbnMuY29tL3BraT9aWlpaWlpBNi5jcnQwQQYI BggrBgEFBQcwAoYmaHR0cDovL2FoLnNpZW1lbnMuY29tL3BraT9aWlpaWlpBNi5jcnQwQQYI
KwYBBQUHMAKGNWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpaWlpBNixMPVBLST9jQUNl KwYBBQUHMAKGNWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpaWlpBNixMPVBLST9jQUNl
cnRpZmljYXRlMEkGCCsGAQUFBzAChj1sZGFwOi8vYWwuc2llbWVucy5jb20vQ049WlpaWlpa cnRpZmljYXRlMEkGCCsGAQUFBzAChj1sZGFwOi8vYWwuc2llbWVucy5jb20vQ049WlpaWlpa
QTYsbz1UcnVzdGNlbnRlcj9jQUNlcnRpZmljYXRlMDAGCCsGAQUFBzABhiRodHRwOi8vb2Nz QTYsbz1UcnVzdGNlbnRlcj9jQUNlcnRpZmljYXRlMDAGCCsGAQUFBzABhiRodHRwOi8vb2Nz
cC5wa2ktc2VydmljZXMuc2llbWVucy5jb20wHwYDVR0jBBgwFoAU+BVdRwxsd3tyxAIXkWii cC5wa2ktc2VydmljZXMuc2llbWVucy5jb20wHwYDVR0jBBgwFoAU+BVdRwxsd3tyxAIXkWii
tvdqCUQwDAYDVR0TAQH/BAIwADBFBgNVHSAEPjA8MDoGDSsGAQQBoWkHAgIEAQMwKTAnBggr tvdqCUQwDAYDVR0TAQH/BAIwADBFBgNVHSAEPjA8MDoGDSsGAQQBoWkHAgIEAQMwKTAnBggr
BgEFBQcCARYbaHR0cDovL3d3dy5zaWVtZW5zLmNvbS9wa2kvMIHKBgNVHR8EgcIwgb8wgbyg BgEFBQcCARYbaHR0cDovL3d3dy5zaWVtZW5zLmNvbS9wa2kvMIHKBgNVHR8EgcIwgb8wgbyg
gbmggbaGJmh0dHA6Ly9jaC5zaWVtZW5zLmNvbS9wa2k/WlpaWlpaQTYuY3JshkFsZGFwOi8v gbmggbaGJmh0dHA6Ly9jaC5zaWVtZW5zLmNvbS9wa2k/WlpaWlpaQTYuY3JshkFsZGFwOi8v
Y2wuc2llbWVucy5uZXQvQ049WlpaWlpaQTYsTD1QS0k/Y2VydGlmaWNhdGVSZXZvY2F0aW9u Y2wuc2llbWVucy5uZXQvQ049WlpaWlpaQTYsTD1QS0k/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
TGlzdIZJbGRhcDovL2NsLnNpZW1lbnMuY29tL0NOPVpaWlpaWkE2LG89VHJ1c3RjZW50ZXI/ TGlzdIZJbGRhcDovL2NsLnNpZW1lbnMuY29tL0NOPVpaWlpaWkE2LG89VHJ1c3RjZW50ZXI/
Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDAdBgNVHSUEFjAUBggrBgEFBQcDAgYIKwYBBQUH Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDAdBgNVHSUEFjAUBggrBgEFBQcDAgYIKwYBBQUH
AwQwDgYDVR0PAQH/BAQDAgeAMFUGA1UdEQROMEygLAYKKwYBBAGCNxQCA6AeDBxkaWVnby5s AwQwDgYDVR0PAQH/BAQDAgeAMFUGA1UdEQROMEygLAYKKwYBBAGCNxQCA6AeDBxkaWVnby5s
b3V6YW4uZXh0QHNpZW1lbnMuY29tgRxkaWVnby5sb3V6YW4uZXh0QHNpZW1lbnMuY29tMB0G b3V6YW4uZXh0QHNpZW1lbnMuY29tgRxkaWVnby5sb3V6YW4uZXh0QHNpZW1lbnMuY29tMB0G
A1UdDgQWBBQj8k8aqZey68w8ALYKGJSGMt5hZDANBgkqhkiG9w0BAQsFAAOCAgEAFDHqxpb1 A1UdDgQWBBQj8k8aqZey68w8ALYKGJSGMt5hZDANBgkqhkiG9w0BAQsFAAOCAgEAFDHqxpb1
R9cB4noC9vx09bkNbmXCpVfl3XCQUmAWTznC0nwEssTTjo0PWuIV4C3jnsp0MRUeHZ6lsyhZ R9cB4noC9vx09bkNbmXCpVfl3XCQUmAWTznC0nwEssTTjo0PWuIV4C3jnsp0MRUeHZ6lsyhZ
OzS1ETwYgvj6wzjb8RF3wgn7N/JOvFGaErMz5HZpKOfzGiNpW6/Rmd4hsRDjAwOVQOXUTqc/ OzS1ETwYgvj6wzjb8RF3wgn7N/JOvFGaErMz5HZpKOfzGiNpW6/Rmd4hsRDjAwOVQOXUTqc/
0Bj3FMoLRCSWSnTp5HdyvrY2xOKHfTrTjzmcLdFaKE2F5n7+dBkwCKVfzut8CqfVq/I7ks4m 0Bj3FMoLRCSWSnTp5HdyvrY2xOKHfTrTjzmcLdFaKE2F5n7+dBkwCKVfzut8CqfVq/I7ks4m
D1IHk93/P6l9U34R2FHPt6zRTNZcWmDirRSlMH4L18CnfiNPuDN/PtRYlt3Vng5EdYN0VCg2 D1IHk93/P6l9U34R2FHPt6zRTNZcWmDirRSlMH4L18CnfiNPuDN/PtRYlt3Vng5EdYN0VCg2
NM/uees0U4ingCb0NFjg66uQ/tjfPQk55MN4Wpls4N6TkMoTCWLiqZzYTGdmVQexzroL6940 NM/uees0U4ingCb0NFjg66uQ/tjfPQk55MN4Wpls4N6TkMoTCWLiqZzYTGdmVQexzroL6940
tmMr8LoN3TpPf0OdvdKEpyH7fzsx5QlmQyywIWec6X+Fx6+l0g91VJnPEtqACpfZIBZtviHl tmMr8LoN3TpPf0OdvdKEpyH7fzsx5QlmQyywIWec6X+Fx6+l0g91VJnPEtqACpfZIBZtviHl
gfX298w+SsvBK8C48Pqs8Ijh7tLrCxx7VMLVHZqwWWPK53ga+CDWmjoSQPxi+CPZF7kao6N5 gfX298w+SsvBK8C48Pqs8Ijh7tLrCxx7VMLVHZqwWWPK53ga+CDWmjoSQPxi+CPZF7kao6N5
4GrJWwSHlHh6WzTbLyLvTJZZ775Utp4W8s8xMUsQJ413iYzEaC8FcSeNjSk5UiDDiHrKmzpM 4GrJWwSHlHh6WzTbLyLvTJZZ775Utp4W8s8xMUsQJ413iYzEaC8FcSeNjSk5UiDDiHrKmzpM
tbApD3pUXStblUMKYGTG1Mj9BcEBFkCdoGlw/ulszIrKFfOyRNDG3Ay+Dj/oMjoKsJphu3px tbApD3pUXStblUMKYGTG1Mj9BcEBFkCdoGlw/ulszIrKFfOyRNDG3Ay+Dj/oMjoKsJphu3px
wyft82rTer7UW/I7o0h0DAG4lkMwggdrMIIFU6ADAgECAgR5nlqfMA0GCSqGSIb3DQEBCwUA wyft82rTer7UW/I7o0h0DAG4lkMwggdrMIIFU6ADAgECAgR5nlqfMA0GCSqGSIb3DQEBCwUA
MIGeMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmF5ZXJuMREwDwYDVQQHDAhNdWVuY2hlbjEQ MIGeMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmF5ZXJuMREwDwYDVQQHDAhNdWVuY2hlbjEQ
MA4GA1UECgwHU2llbWVuczERMA8GA1UEBRMIWlpaWlpaQTMxHTAbBgNVBAsMFFNpZW1lbnMg MA4GA1UECgwHU2llbWVuczERMA8GA1UEBRMIWlpaWlpaQTMxHTAbBgNVBAsMFFNpZW1lbnMg
VHJ1c3QgQ2VudGVyMScwJQYDVQQDDB5TaWVtZW5zIElzc3VpbmcgQ0EgRUUgRW5jIDIwMTYw VHJ1c3QgQ2VudGVyMScwJQYDVQQDDB5TaWVtZW5zIElzc3VpbmcgQ0EgRUUgRW5jIDIwMTYw
HhcNMTkwOTI3MDgwMTM5WhcNMjAwOTI3MDgwMTM3WjB3MREwDwYDVQQFEwhaMDAzSDA4VDEO HhcNMTkwOTI3MDgwMTM5WhcNMjAwOTI3MDgwMTM3WjB3MREwDwYDVQQFEwhaMDAzSDA4VDEO
MAwGA1UEKgwFRGllZ28xGDAWBgNVBAQMD0xvdXphbiBNYXJ0aW5lejEYMBYGA1UECgwPU2ll MAwGA1UEKgwFRGllZ28xGDAWBgNVBAQMD0xvdXphbiBNYXJ0aW5lejEYMBYGA1UECgwPU2ll
bWVucy1QYXJ0bmVyMR4wHAYDVQQDDBVMb3V6YW4gTWFydGluZXogRGllZ28wggEiMA0GCSqG bWVucy1QYXJ0bmVyMR4wHAYDVQQDDBVMb3V6YW4gTWFydGluZXogRGllZ28wggEiMA0GCSqG
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCyby5qKzZIrGYWRqxnaAyMt/a/uc0uMk0F3MjwxvPM SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCyby5qKzZIrGYWRqxnaAyMt/a/uc0uMk0F3MjwxvPM
vh5DllUpqx0l8ZDakDjPhlEXTeoL4DHNgmh+CDCs76CppM3cNG/1W1Ajo/L2iwMoXaxYuQ/F vh5DllUpqx0l8ZDakDjPhlEXTeoL4DHNgmh+CDCs76CppM3cNG/1W1Ajo/L2iwMoXaxYuQ/F
q7ED+02KEkWX2DDVVG3fhrUGP20QAq77xPDptmVWZnUnuobZBNYkC49Xfl9HJvkJL8P0+Jqb q7ED+02KEkWX2DDVVG3fhrUGP20QAq77xPDptmVWZnUnuobZBNYkC49Xfl9HJvkJL8P0+Jqb
Eae7p4roiEr7wNkGriwrVXgA3oPNF/W+OuI76JTNTajS/6PAK/GeqIvLjfuBXpdBZTY031nE Eae7p4roiEr7wNkGriwrVXgA3oPNF/W+OuI76JTNTajS/6PAK/GeqIvLjfuBXpdBZTY031nE
Cztca8vI1jUjQzVhS+0dWpvpfhkVumbvOnid8DI9lapYsX8dpZFsa3ya+T3tjUdGSOOKi0kg Cztca8vI1jUjQzVhS+0dWpvpfhkVumbvOnid8DI9lapYsX8dpZFsa3ya+T3tjUdGSOOKi0kg
lWf/XYyyfhmDAgMBAAGjggLVMIIC0TAdBgNVHQ4EFgQUprhTCDwNLfPImpSfWdq+QvPTo9Mw lWf/XYyyfhmDAgMBAAGjggLVMIIC0TAdBgNVHQ4EFgQUprhTCDwNLfPImpSfWdq+QvPTo9Mw
JwYDVR0RBCAwHoEcZGllZ28ubG91emFuLmV4dEBzaWVtZW5zLmNvbTAOBgNVHQ8BAf8EBAMC JwYDVR0RBCAwHoEcZGllZ28ubG91emFuLmV4dEBzaWVtZW5zLmNvbTAOBgNVHQ8BAf8EBAMC
BDAwLAYDVR0lBCUwIwYIKwYBBQUHAwQGCisGAQQBgjcKAwQGCysGAQQBgjcKAwQBMIHKBgNV BDAwLAYDVR0lBCUwIwYIKwYBBQUHAwQGCisGAQQBgjcKAwQGCysGAQQBgjcKAwQBMIHKBgNV
HR8EgcIwgb8wgbyggbmggbaGJmh0dHA6Ly9jaC5zaWVtZW5zLmNvbS9wa2k/WlpaWlpaQTMu HR8EgcIwgb8wgbyggbmggbaGJmh0dHA6Ly9jaC5zaWVtZW5zLmNvbS9wa2k/WlpaWlpaQTMu
Y3JshkFsZGFwOi8vY2wuc2llbWVucy5uZXQvQ049WlpaWlpaQTMsTD1QS0k/Y2VydGlmaWNh Y3JshkFsZGFwOi8vY2wuc2llbWVucy5uZXQvQ049WlpaWlpaQTMsTD1QS0k/Y2VydGlmaWNh
dGVSZXZvY2F0aW9uTGlzdIZJbGRhcDovL2NsLnNpZW1lbnMuY29tL0NOPVpaWlpaWkEzLG89 dGVSZXZvY2F0aW9uTGlzdIZJbGRhcDovL2NsLnNpZW1lbnMuY29tL0NOPVpaWlpaWkEzLG89
VHJ1c3RjZW50ZXI/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDBFBgNVHSAEPjA8MDoGDSsG VHJ1c3RjZW50ZXI/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDBFBgNVHSAEPjA8MDoGDSsG
AQQBoWkHAgIEAQMwKTAnBggrBgEFBQcCARYbaHR0cDovL3d3dy5zaWVtZW5zLmNvbS9wa2kv AQQBoWkHAgIEAQMwKTAnBggrBgEFBQcCARYbaHR0cDovL3d3dy5zaWVtZW5zLmNvbS9wa2kv
MAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUoassbqB68NPCTeof8R4hivwMre8wggEEBggr MAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUoassbqB68NPCTeof8R4hivwMre8wggEEBggr
BgEFBQcBAQSB9zCB9DAyBggrBgEFBQcwAoYmaHR0cDovL2FoLnNpZW1lbnMuY29tL3BraT9a BgEFBQcBAQSB9zCB9DAyBggrBgEFBQcwAoYmaHR0cDovL2FoLnNpZW1lbnMuY29tL3BraT9a
WlpaWlpBMy5jcnQwQQYIKwYBBQUHMAKGNWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpa WlpaWlpBMy5jcnQwQQYIKwYBBQUHMAKGNWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpa
WlpBMyxMPVBLST9jQUNlcnRpZmljYXRlMEkGCCsGAQUFBzAChj1sZGFwOi8vYWwuc2llbWVu WlpBMyxMPVBLST9jQUNlcnRpZmljYXRlMEkGCCsGAQUFBzAChj1sZGFwOi8vYWwuc2llbWVu
cy5jb20vQ049WlpaWlpaQTMsbz1UcnVzdGNlbnRlcj9jQUNlcnRpZmljYXRlMDAGCCsGAQUF cy5jb20vQ049WlpaWlpaQTMsbz1UcnVzdGNlbnRlcj9jQUNlcnRpZmljYXRlMDAGCCsGAQUF
BzABhiRodHRwOi8vb2NzcC5wa2ktc2VydmljZXMuc2llbWVucy5jb20wDQYJKoZIhvcNAQEL BzABhiRodHRwOi8vb2NzcC5wa2ktc2VydmljZXMuc2llbWVucy5jb20wDQYJKoZIhvcNAQEL
BQADggIBAF98ZMNg28LgkwdjOdvOGbC1QitsWjZTyotmQESF0nClDLUhb0O5675vVixntbrf BQADggIBAF98ZMNg28LgkwdjOdvOGbC1QitsWjZTyotmQESF0nClDLUhb0O5675vVixntbrf
eB8xy1+KRiadk40GnAIJ0YzmNl4Tav6hPYv9VBWe5olsWG7C4qB3Q/SwhvW/e+owxv1cBra8 eB8xy1+KRiadk40GnAIJ0YzmNl4Tav6hPYv9VBWe5olsWG7C4qB3Q/SwhvW/e+owxv1cBra8
R3oRudiN81eTZQHyNghRephVqQG/dpPYqydoANfIhEpHa79QlpaCAeYl4896AZOS8HYbkDFs R3oRudiN81eTZQHyNghRephVqQG/dpPYqydoANfIhEpHa79QlpaCAeYl4896AZOS8HYbkDFs
hLdv7sEHtl79YuSWI1wBjbJl70c0Sb4wLRgCPuHyQj2Uw/vQ5xJlEvBDZAIXXe1TP/nqiuY6 hLdv7sEHtl79YuSWI1wBjbJl70c0Sb4wLRgCPuHyQj2Uw/vQ5xJlEvBDZAIXXe1TP/nqiuY6
7nweJbbeqfFE6ZP3kCe+mEIWGSaO0iThZyLGer8fHs1XiEmhhPgvC7P7KodzpXU6+hX+ZzbD 7nweJbbeqfFE6ZP3kCe+mEIWGSaO0iThZyLGer8fHs1XiEmhhPgvC7P7KodzpXU6+hX+ZzbD
DxEjFfetV5sh0aNSXG9xx4hZmS9bpImBGR8MvZ7cgxqItvLtY2xvfUbYW244d4RcWesaCDq3 DxEjFfetV5sh0aNSXG9xx4hZmS9bpImBGR8MvZ7cgxqItvLtY2xvfUbYW244d4RcWesaCDq3
ZEIo6uCIzOzJAwjUdLIac+lLV0rxiHmb7O3cQ19kjpWDB31hmfrus/TKJ55pBKVWBX5m/mFv ZEIo6uCIzOzJAwjUdLIac+lLV0rxiHmb7O3cQ19kjpWDB31hmfrus/TKJ55pBKVWBX5m/mFv
K8Ep5USpGrNS0EzOP7I1kQZv2VsvAhSxk/m5FMLpDy8T0O8YgbLypTXoeJFWCF6RduSjVsaZ K8Ep5USpGrNS0EzOP7I1kQZv2VsvAhSxk/m5FMLpDy8T0O8YgbLypTXoeJFWCF6RduSjVsaZ
lkAtTQYud683pjyOMxJXaQUYGU1PmEYSOonMkVsT9aBcxYkXLp+Ln/+8G0OCYu7dRdwnj+Ut lkAtTQYud683pjyOMxJXaQUYGU1PmEYSOonMkVsT9aBcxYkXLp+Ln/+8G0OCYu7dRdwnj+Ut
7yR/ltxtgDcaFApCb0qBTKbgbqZk1fASmkOp+kbdYmoUMYICVjCCAlICAQEwgb8wgbYxCzAJ 7yR/ltxtgDcaFApCb0qBTKbgbqZk1fASmkOp+kbdYmoUMYICVjCCAlICAQEwgb8wgbYxCzAJ
BgNVBAYTAkRFMQ8wDQYDVQQIDAZCYXllcm4xETAPBgNVBAcMCE11ZW5jaGVuMRAwDgYDVQQK BgNVBAYTAkRFMQ8wDQYDVQQIDAZCYXllcm4xETAPBgNVBAcMCE11ZW5jaGVuMRAwDgYDVQQK
DAdTaWVtZW5zMREwDwYDVQQFEwhaWlpaWlpBNjEdMBsGA1UECwwUU2llbWVucyBUcnVzdCBD DAdTaWVtZW5zMREwDwYDVQQFEwhaWlpaWlpBNjEdMBsGA1UECwwUU2llbWVucyBUcnVzdCBD
ZW50ZXIxPzA9BgNVBAMMNlNpZW1lbnMgSXNzdWluZyBDQSBNZWRpdW0gU3RyZW5ndGggQXV0 ZW50ZXIxPzA9BgNVBAMMNlNpZW1lbnMgSXNzdWluZyBDQSBNZWRpdW0gU3RyZW5ndGggQXV0
aGVudGljYXRpb24gMjAxNgIEZ5a6PTANBglghkgBZQMEAgEFAKBpMC8GCSqGSIb3DQEJBDEi aGVudGljYXRpb24gMjAxNgIEZ5a6PTANBglghkgBZQMEAgEFAKBpMC8GCSqGSIb3DQEJBDEi
BCAOR58AbNfSrI+vtMs+dgAQtn3IVZ3RjYC5hz3j9k+6TTAYBgkqhkiG9w0BCQMxCwYJKoZI BCAOR58AbNfSrI+vtMs+dgAQtn3IVZ3RjYC5hz3j9k+6TTAYBgkqhkiG9w0BCQMxCwYJKoZI
hvcNAQcBMBwGCSqGSIb3DQEJBTEPFw0yMDAyMTcyMTU2NDdaMA0GCSqGSIb3DQEBAQUABIIB hvcNAQcBMBwGCSqGSIb3DQEJBTEPFw0yMDAyMTcyMTU2NDdaMA0GCSqGSIb3DQEBAQUABIIB
AHLSBcFHhNHPevbwqvA2ecuVb/aKnj45CFF6l8esP1H5DRm1ee5qMKuIS84NFuFC9RUENNhW AHLSBcFHhNHPevbwqvA2ecuVb/aKnj45CFF6l8esP1H5DRm1ee5qMKuIS84NFuFC9RUENNhW
DBzsB+BVGz64o1f8QgIklYVrIJ4JZ0q1abNG7NbkVKWIpS3CQo//YWShUTYg+JpKx4YbahGR DBzsB+BVGz64o1f8QgIklYVrIJ4JZ0q1abNG7NbkVKWIpS3CQo//YWShUTYg+JpKx4YbahGR
sP5zbufbU4eagrrqBChjPTLy+njdjwCNu0XPykBTKOOf6BMjnS33AYjHJyh83JOY7rw3IDLx sP5zbufbU4eagrrqBChjPTLy+njdjwCNu0XPykBTKOOf6BMjnS33AYjHJyh83JOY7rw3IDLx
8POQH4g5EMRpl9354s0rEkIezMt7pfUAsqY3QnQ8hvlE4KTikPQ+tvLMK1l/ffcLAP8BdBNI 8POQH4g5EMRpl9354s0rEkIezMt7pfUAsqY3QnQ8hvlE4KTikPQ+tvLMK1l/ffcLAP8BdBNI
YA3ikb3qCoGNSLKieYzNnBPhNOIJELUtEEaljAFZYMQzMKCbI4JdiDs= YA3ikb3qCoGNSLKieYzNnBPhNOIJELUtEEaljAFZYMQzMKCbI4JdiDs=
--B_3664825007_1904734766-- --B_3664825007_1904734766--

View file

@ -226,6 +226,22 @@ RSpec.describe Banzai::Filter::AutolinkFilter, feature_category: :team_planning
end end
end end
it 'protects against malicious backtracking' do
doc = "http://#{'&' * 1_000_000}x"
expect do
Timeout.timeout(30.seconds) { filter(doc) }
end.not_to raise_error
end
it 'does not timeout with excessively long scheme' do
doc = "#{'h' * 1_000_000}://example.com"
expect do
Timeout.timeout(30.seconds) { filter(doc) }
end.not_to raise_error
end
# Rinku does not escape these characters in HTML attributes, but content_tag # Rinku does not escape these characters in HTML attributes, but content_tag
# does. We don't care about that difference for these specs, though. # does. We don't care about that difference for these specs, though.
def unescape(html) def unescape(html)
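The two timeout examples above target catastrophic regex backtracking in the autolink filter. As a rough sketch of the failure class (not the filter's actual code), a pattern with a bounded quantifier stays fast on the same pathological input, whereas a nested, unbounded one would hang:

```ruby
require 'timeout'

input = "http://#{'&' * 1_000_000}x"

# A pattern like /\Ahttp:\/\/(&+)+x\z/ backtracks catastrophically on this
# input; capping the match length keeps the attempt cheap.
bounded = %r{\Ahttp://\S{1,2048}\z}

Timeout.timeout(5) { bounded.match?(input) } # => false, returns immediately
```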

View file

@ -39,6 +39,7 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ
it_behaves_like 'fails fast', 'A' * 50000 it_behaves_like 'fails fast', 'A' * 50000
it_behaves_like 'fails fast', '/a' * 50000 it_behaves_like 'fails fast', '/a' * 50000
it_behaves_like 'fails fast', "mailto:#{'a-' * 499_000}@aaaaaaaa..aaaaaaaa.example.com"
end end
it 'allows references with text after the > character' do it 'allows references with text after the > character' do
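The added `fails fast` case feeds the project reference filter a mailto string of roughly a megabyte. A minimal sketch of the kind of guard such shared examples imply (the constant and method names are illustrative, not GitLab's code): reject oversized candidates before any regex work runs.

```ruby
MAX_REFERENCE_LENGTH = 8_192 # illustrative cap

def reference_candidate?(text)
  text.length <= MAX_REFERENCE_LENGTH
end

reference_candidate?("mailto:#{'a-' * 499_000}@example.com") # => false, no regex is evaluated
```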

View file

@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline, feature_category: :importers do
let_it_be(:portable) { create(:project) } let_it_be(:portable) { create(:project) }
let_it_be(:oid) { 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' } let_it_be(:oid) { 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' }
@ -118,13 +118,22 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do
context 'when file path is symlink' do context 'when file path is symlink' do
it 'returns' do it 'returns' do
symlink = File.join(tmpdir, 'symlink') symlink = File.join(tmpdir, 'symlink')
FileUtils.ln_s(lfs_file_path, symlink)
FileUtils.ln_s(File.join(tmpdir, lfs_file_path), symlink) expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(symlink).and_call_original
expect { pipeline.load(context, symlink) }.not_to change { portable.lfs_objects.count } expect { pipeline.load(context, symlink) }.not_to change { portable.lfs_objects.count }
end end
end end
context 'when file path shares multiple hard links' do
it 'returns' do
FileUtils.link(lfs_file_path, File.join(tmpdir, 'hard_link'))
expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(lfs_file_path).and_call_original
expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects.count }
end
end
context 'when path is a directory' do context 'when path is a directory' do
it 'returns' do it 'returns' do
expect { pipeline.load(context, Dir.tmpdir) }.not_to change { portable.lfs_objects.count } expect { pipeline.load(context, Dir.tmpdir) }.not_to change { portable.lfs_objects.count }
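Both the symlink and the hard-link cases expect `Gitlab::Utils::FileInfo.linked?` to be consulted before the LFS object is loaded. A hedged sketch of the guard these specs exercise (the `safe_to_load?` name is illustrative):

```ruby
# Skip anything that is a symlink or shares hard links with another file;
# pipeline.load would return early when this is false.
def safe_to_load?(file_path)
  File.file?(file_path) && !Gitlab::Utils::FileInfo.linked?(file_path)
end
```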

View file

@ -105,6 +105,7 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline, feature_category
it 'returns' do it 'returns' do
path = File.join(tmpdir, 'test') path = File.join(tmpdir, 'test')
FileUtils.touch(path) FileUtils.touch(path)
expect { pipeline.load(context, path) }.not_to change { portable.uploads.count } expect { pipeline.load(context, path) }.not_to change { portable.uploads.count }
end end
end end
@ -118,13 +119,22 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline, feature_category
context 'when path is a symlink' do context 'when path is a symlink' do
it 'does not upload the file' do it 'does not upload the file' do
symlink = File.join(tmpdir, 'symlink') symlink = File.join(tmpdir, 'symlink')
FileUtils.ln_s(upload_file_path, symlink)
FileUtils.ln_s(File.join(tmpdir, upload_file_path), symlink) expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(symlink).and_call_original
expect { pipeline.load(context, symlink) }.not_to change { portable.uploads.count } expect { pipeline.load(context, symlink) }.not_to change { portable.uploads.count }
end end
end end
context 'when path has multiple hard links' do
it 'does not upload the file' do
FileUtils.link(upload_file_path, File.join(tmpdir, 'hard_link'))
expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(upload_file_path).and_call_original
expect { pipeline.load(context, upload_file_path) }.not_to change { portable.uploads.count }
end
end
context 'when path traverses' do context 'when path traverses' do
it 'does not upload the file' do it 'does not upload the file' do
path_traversal = "#{uploads_dir_path}/avatar/../../../../etc/passwd" path_traversal = "#{uploads_dir_path}/avatar/../../../../etc/passwd"
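The uploads pipeline gets the same treatment. The underlying detection is plain `File::Stat`: a regular file that shares hard links reports `nlink > 1`. A small standalone illustration using only the standard library:

```ruby
require 'fileutils'
require 'tmpdir'

Dir.mktmpdir do |dir|
  original = File.join(dir, 'file.txt')
  FileUtils.touch(original)
  FileUtils.link(original, File.join(dir, 'hard_link'))

  File.stat(original).nlink # => 2, so the file counts as linked and is skipped
end
```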

View file

@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline, feature_category: :importers do
let_it_be(:design) { create(:design, :with_file) } let_it_be(:design) { create(:design, :with_file) }
let(:portable) { create(:project) } let(:portable) { create(:project) }
@ -125,9 +125,9 @@ RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do
context 'when path is symlink' do context 'when path is symlink' do
it 'returns' do it 'returns' do
symlink = File.join(tmpdir, 'symlink') symlink = File.join(tmpdir, 'symlink')
FileUtils.ln_s(design_bundle_path, symlink)
FileUtils.ln_s(File.join(tmpdir, design_bundle_path), symlink) expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(symlink).and_call_original
expect(portable.design_repository).not_to receive(:create_from_bundle) expect(portable.design_repository).not_to receive(:create_from_bundle)
pipeline.load(context, symlink) pipeline.load(context, symlink)
@ -136,6 +136,19 @@ RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do
end end
end end
context 'when path has multiple hard links' do
it 'returns' do
FileUtils.link(design_bundle_path, File.join(tmpdir, 'hard_link'))
expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(design_bundle_path).and_call_original
expect(portable.design_repository).not_to receive(:create_from_bundle)
pipeline.load(context, design_bundle_path)
expect(portable.design_repository.exists?).to eq(false)
end
end
context 'when path is not under tmpdir' do context 'when path is not under tmpdir' do
it 'returns' do it 'returns' do
expect { pipeline.load(context, '/home/test.txt') } expect { pipeline.load(context, '/home/test.txt') }

View file

@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline, feature_category: :importers do
let_it_be(:source) { create(:project, :repository) } let_it_be(:source) { create(:project, :repository) }
let(:portable) { create(:project) } let(:portable) { create(:project) }
@ -123,9 +123,9 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do
context 'when path is symlink' do context 'when path is symlink' do
it 'returns' do it 'returns' do
symlink = File.join(tmpdir, 'symlink') symlink = File.join(tmpdir, 'symlink')
FileUtils.ln_s(bundle_path, symlink)
FileUtils.ln_s(File.join(tmpdir, bundle_path), symlink) expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(symlink).and_call_original
expect(portable.repository).not_to receive(:create_from_bundle) expect(portable.repository).not_to receive(:create_from_bundle)
pipeline.load(context, symlink) pipeline.load(context, symlink)
@ -134,6 +134,19 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do
end end
end end
context 'when path has multiple hard links' do
it 'returns' do
FileUtils.link(bundle_path, File.join(tmpdir, 'hard_link'))
expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(bundle_path).and_call_original
expect(portable.repository).not_to receive(:create_from_bundle)
pipeline.load(context, bundle_path)
expect(portable.repository.exists?).to eq(false)
end
end
context 'when path is not under tmpdir' do context 'when path is not under tmpdir' do
it 'returns' do it 'returns' do
expect { pipeline.load(context, '/home/test.txt') } expect { pipeline.load(context, '/home/test.txt') }

View file

@ -32,6 +32,12 @@ RSpec.describe Gitlab::Checks::BranchCheck do
expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.") expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.")
end end
it "prohibits 40-character hexadecimal branch names followed by a dash as the start of a path" do
allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e-/test")
expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.")
end
it "doesn't prohibit a nested hexadecimal in a branch name" do it "doesn't prohibit a nested hexadecimal in a branch name" do
allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e-fix") allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e-fix")
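The new example rejects a 40-character hexadecimal name followed by `-/`, while `<sha>-fix` remains a valid branch name. A pattern of roughly the right shape (an assumption for illustration, not the shipped check):

```ruby
# Reject a bare 40-hex name, or one followed by a dash that starts a path.
FORBIDDEN_BRANCH_NAME = %r{\A\h{40}(-/|\z)}

FORBIDDEN_BRANCH_NAME.match?('267208abfe40e546f5e847444276f7d43a39503e')       # => true  (rejected)
FORBIDDEN_BRANCH_NAME.match?('267208abfe40e546f5e847444276f7d43a39503e-/test') # => true  (rejected)
FORBIDDEN_BRANCH_NAME.match?('267208abfe40e546f5e847444276f7d43a39503e-fix')   # => false (allowed)
```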

View file

@ -105,6 +105,16 @@ RSpec.describe Gitlab::Ci::DecompressedGzipSizeValidator, feature_category: :imp
end end
end end
context 'when archive path has multiple hard links' do
before do
FileUtils.link(filepath, File.join(Dir.mktmpdir, 'hard_link'))
end
it 'returns false' do
expect(subject).not_to be_valid
end
end
context 'when archive path is not a file' do context 'when archive path is not a file' do
let(:filepath) { Dir.mktmpdir } let(:filepath) { Dir.mktmpdir }
let(:filesize) { File.size(filepath) } let(:filesize) { File.size(filepath) }

View file

@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Gitlab::GithubImport::AttachmentsDownloader do RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :importers do
subject(:downloader) { described_class.new(file_url) } subject(:downloader) { described_class.new(file_url) }
let_it_be(:file_url) { 'https://example.com/avatar.png' } let_it_be(:file_url) { 'https://example.com/avatar.png' }
@ -39,6 +39,26 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader do
end end
end end
context 'when file shares multiple hard links' do
let(:tmpdir) { Dir.mktmpdir }
let(:hard_link) { File.join(tmpdir, 'hard_link') }
before do
existing_file = File.join(tmpdir, 'file.txt')
FileUtils.touch(existing_file)
FileUtils.link(existing_file, hard_link)
allow(downloader).to receive(:filepath).and_return(hard_link)
end
it 'raises expected exception' do
expect(Gitlab::Utils::FileInfo).to receive(:linked?).with(hard_link).and_call_original
expect { downloader.perform }.to raise_exception(
described_class::DownloadError,
'Invalid downloaded file'
)
end
end
context 'when filename is malicious' do context 'when filename is malicious' do
let_it_be(:file_url) { 'https://example.com/ava%2F..%2Ftar.png' } let_it_be(:file_url) { 'https://example.com/ava%2F..%2Ftar.png' }

View file

@ -3,6 +3,8 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Gitlab::Harbor::Query do RSpec.describe Gitlab::Harbor::Query do
using RSpec::Parameterized::TableSyntax
let_it_be(:harbor_integration) { create(:harbor_integration) } let_it_be(:harbor_integration) { create(:harbor_integration) }
let(:params) { {} } let(:params) { {} }
@ -111,19 +113,20 @@ RSpec.describe Gitlab::Harbor::Query do
end end
context 'search' do context 'search' do
context 'with valid search' do where(:search_param, :is_valid) do
let(:params) { { search: 'name=desc' } } "name=desc" | true
"name=value1,name=value-2" | true
it 'initialize successfully' do "name=value1,name=value_2" | false
expect(query.valid?).to eq(true) "name=desc,key=value" | false
end "name=value1, name=value2" | false
"name" | false
end end
context 'with invalid search' do with_them do
let(:params) { { search: 'blabla' } } let(:params) { { search: search_param } }
it 'initialize failed' do it "validates according to the regex" do
expect(query.valid?).to eq(false) expect(query.valid?).to eq(is_valid)
end end
end end
end end
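The table-driven rewrite pins down which `search` values the Harbor query accepts. A regex consistent with those rows (assumed for illustration; the real pattern may differ): one or more `name=<letters, digits, dashes>` pairs, comma-separated, with no spaces and no other keys.

```ruby
HARBOR_SEARCH_PARAM = /\Aname=[a-zA-Z0-9-]+(,name=[a-zA-Z0-9-]+)*\z/

HARBOR_SEARCH_PARAM.match?('name=value1,name=value-2') # => true
HARBOR_SEARCH_PARAM.match?('name=value1, name=value2') # => false (space)
HARBOR_SEARCH_PARAM.match?('name=desc,key=value')      # => false (unknown key)
```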

View file

@ -5,13 +5,16 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importers do RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importers do
include ExportFileHelper include ExportFileHelper
let(:path) { "#{Dir.tmpdir}/symlink_test" }
let(:archive) { 'spec/fixtures/symlink_export.tar.gz' }
let(:shared) { Gitlab::ImportExport::Shared.new(nil) } let(:shared) { Gitlab::ImportExport::Shared.new(nil) }
let(:tmpdir) { Dir.mktmpdir } # Separate where files are written during this test by their kind, to avoid them interfering with each other:
# - `source_dir` Dir to compress files from.
# - `target_dir` Dir to decompress archived files into.
# - `archive_dir` Dir to write any archive files to.
let(:source_dir) { Dir.mktmpdir }
let(:target_dir) { Dir.mktmpdir }
let(:archive_dir) { Dir.mktmpdir } let(:archive_dir) { Dir.mktmpdir }
subject do subject(:mock_class) do
Class.new do Class.new do
include Gitlab::ImportExport::CommandLineUtil include Gitlab::ImportExport::CommandLineUtil
@ -25,38 +28,59 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
end end
before do before do
FileUtils.mkdir_p(path) FileUtils.mkdir_p(source_dir)
end end
after do after do
FileUtils.rm_rf(path) FileUtils.rm_rf(source_dir)
FileUtils.rm_rf(target_dir)
FileUtils.rm_rf(archive_dir) FileUtils.rm_rf(archive_dir)
FileUtils.remove_entry(tmpdir)
end end
shared_examples 'deletes symlinks' do |compression, decompression| shared_examples 'deletes symlinks' do |compression, decompression|
it 'deletes the symlinks', :aggregate_failures do it 'deletes the symlinks', :aggregate_failures do
Dir.mkdir("#{tmpdir}/.git") Dir.mkdir("#{source_dir}/.git")
Dir.mkdir("#{tmpdir}/folder") Dir.mkdir("#{source_dir}/folder")
FileUtils.touch("#{tmpdir}/file.txt") FileUtils.touch("#{source_dir}/file.txt")
FileUtils.touch("#{tmpdir}/folder/file.txt") FileUtils.touch("#{source_dir}/folder/file.txt")
FileUtils.touch("#{tmpdir}/.gitignore") FileUtils.touch("#{source_dir}/.gitignore")
FileUtils.touch("#{tmpdir}/.git/config") FileUtils.touch("#{source_dir}/.git/config")
File.symlink('file.txt', "#{tmpdir}/.symlink") File.symlink('file.txt', "#{source_dir}/.symlink")
File.symlink('file.txt', "#{tmpdir}/.git/.symlink") File.symlink('file.txt', "#{source_dir}/.git/.symlink")
File.symlink('file.txt', "#{tmpdir}/folder/.symlink") File.symlink('file.txt', "#{source_dir}/folder/.symlink")
archive = File.join(archive_dir, 'archive') archive_file = File.join(archive_dir, 'symlink_archive.tar.gz')
subject.public_send(compression, archive: archive, dir: tmpdir) subject.public_send(compression, archive: archive_file, dir: source_dir)
subject.public_send(decompression, archive: archive_file, dir: target_dir)
subject.public_send(decompression, archive: archive, dir: archive_dir) expect(File).to exist("#{target_dir}/file.txt")
expect(File).to exist("#{target_dir}/folder/file.txt")
expect(File).to exist("#{target_dir}/.gitignore")
expect(File).to exist("#{target_dir}/.git/config")
expect(File).not_to exist("#{target_dir}/.symlink")
expect(File).not_to exist("#{target_dir}/.git/.symlink")
expect(File).not_to exist("#{target_dir}/folder/.symlink")
end
end
expect(File.exist?("#{archive_dir}/file.txt")).to eq(true) shared_examples 'handles shared hard links' do |compression, decompression|
expect(File.exist?("#{archive_dir}/folder/file.txt")).to eq(true) let(:archive_file) { File.join(archive_dir, 'hard_link_archive.tar.gz') }
expect(File.exist?("#{archive_dir}/.gitignore")).to eq(true)
expect(File.exist?("#{archive_dir}/.git/config")).to eq(true) subject(:decompress) { mock_class.public_send(decompression, archive: archive_file, dir: target_dir) }
expect(File.exist?("#{archive_dir}/.symlink")).to eq(false)
expect(File.exist?("#{archive_dir}/.git/.symlink")).to eq(false) before do
expect(File.exist?("#{archive_dir}/folder/.symlink")).to eq(false) Dir.mkdir("#{source_dir}/dir")
FileUtils.touch("#{source_dir}/file.txt")
FileUtils.touch("#{source_dir}/dir/.file.txt")
FileUtils.link("#{source_dir}/file.txt", "#{source_dir}/.hard_linked_file.txt")
mock_class.public_send(compression, archive: archive_file, dir: source_dir)
end
it 'raises an exception and deletes the extraction dir', :aggregate_failures do
expect(FileUtils).to receive(:remove_dir).with(target_dir).and_call_original
expect(Dir).to exist(target_dir)
expect { decompress }.to raise_error(described_class::HardLinkError)
expect(Dir).not_to exist(target_dir)
end end
end end
@ -212,6 +236,8 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
end end
describe '#gzip' do describe '#gzip' do
let(:path) { source_dir }
it 'compresses specified file' do it 'compresses specified file' do
tempfile = Tempfile.new('test', path) tempfile = Tempfile.new('test', path)
filename = File.basename(tempfile.path) filename = File.basename(tempfile.path)
@ -229,14 +255,16 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
end end
describe '#gunzip' do describe '#gunzip' do
let(:path) { source_dir }
it 'decompresses specified file' do it 'decompresses specified file' do
filename = 'labels.ndjson.gz' filename = 'labels.ndjson.gz'
gz_filepath = "spec/fixtures/bulk_imports/gz/#{filename}" gz_filepath = "spec/fixtures/bulk_imports/gz/#{filename}"
FileUtils.copy_file(gz_filepath, File.join(tmpdir, filename)) FileUtils.copy_file(gz_filepath, File.join(path, filename))
subject.gunzip(dir: tmpdir, filename: filename) subject.gunzip(dir: path, filename: filename)
expect(File.exist?(File.join(tmpdir, 'labels.ndjson'))).to eq(true) expect(File.exist?(File.join(path, 'labels.ndjson'))).to eq(true)
end end
context 'when exception occurs' do context 'when exception occurs' do
@ -250,7 +278,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
it 'archives a folder without compression' do it 'archives a folder without compression' do
archive_file = File.join(archive_dir, 'archive.tar') archive_file = File.join(archive_dir, 'archive.tar')
result = subject.tar_cf(archive: archive_file, dir: tmpdir) result = subject.tar_cf(archive: archive_file, dir: source_dir)
expect(result).to eq(true) expect(result).to eq(true)
expect(File.exist?(archive_file)).to eq(true) expect(File.exist?(archive_file)).to eq(true)
@ -270,29 +298,35 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
end end
describe '#untar_zxf' do describe '#untar_zxf' do
let(:tar_archive_fixture) { 'spec/fixtures/symlink_export.tar.gz' }
it_behaves_like 'deletes symlinks', :tar_czf, :untar_zxf it_behaves_like 'deletes symlinks', :tar_czf, :untar_zxf
it_behaves_like 'handles shared hard links', :tar_czf, :untar_zxf
it 'has the right mask for project.json' do it 'has the right mask for project.json' do
subject.untar_zxf(archive: archive, dir: path) subject.untar_zxf(archive: tar_archive_fixture, dir: target_dir)
expect(file_permissions("#{path}/project.json")).to eq(0755) # originally 777 expect(file_permissions("#{target_dir}/project.json")).to eq(0755) # originally 777
end end
it 'has the right mask for uploads' do it 'has the right mask for uploads' do
subject.untar_zxf(archive: archive, dir: path) subject.untar_zxf(archive: tar_archive_fixture, dir: target_dir)
expect(file_permissions("#{path}/uploads")).to eq(0755) # originally 555 expect(file_permissions("#{target_dir}/uploads")).to eq(0755) # originally 555
end end
end end
describe '#untar_xf' do describe '#untar_xf' do
let(:tar_archive_fixture) { 'spec/fixtures/symlink_export.tar.gz' }
it_behaves_like 'deletes symlinks', :tar_cf, :untar_xf it_behaves_like 'deletes symlinks', :tar_cf, :untar_xf
it_behaves_like 'handles shared hard links', :tar_cf, :untar_xf
it 'extracts archive without decompression' do it 'extracts archive without decompression' do
filename = 'archive.tar.gz' filename = 'archive.tar.gz'
archive_file = File.join(archive_dir, 'archive.tar') archive_file = File.join(archive_dir, 'archive.tar')
FileUtils.copy_file(archive, File.join(archive_dir, filename)) FileUtils.copy_file(tar_archive_fixture, File.join(archive_dir, filename))
subject.gunzip(dir: archive_dir, filename: filename) subject.gunzip(dir: archive_dir, filename: filename)
result = subject.untar_xf(archive: archive_file, dir: archive_dir) result = subject.untar_xf(archive: archive_file, dir: archive_dir)
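The new `handles shared hard links` shared examples expect decompression to abort and clean up when any extracted entry shares hard links. A hedged sketch of such a post-extraction validation (the traversal is an assumption; only `Gitlab::Utils::FileInfo.shares_hard_link?`, `HardLinkError`, and the directory removal appear in the specs):

```ruby
def validate_extracted_tree!(dir)
  Dir.glob(File.join(dir, '**', '*'), File::FNM_DOTMATCH).each do |entry|
    next if %w[. ..].include?(File.basename(entry))
    next unless Gitlab::Utils::FileInfo.shares_hard_link?(entry)

    # Abort the import and leave nothing behind from the partial extraction.
    FileUtils.remove_dir(dir)
    raise HardLinkError, 'File shares hard link'
  end
end
```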

View file

@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_category: :importers do
let_it_be(:filepath) { File.join(Dir.tmpdir, 'decompressed_archive_size_validator_spec.gz') } let_it_be(:filepath) { File.join(Dir.tmpdir, 'decompressed_archive_size_validator_spec.gz') }
before(:all) do before(:all) do
@ -121,7 +121,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
context 'which archive path is a symlink' do context 'which archive path is a symlink' do
let(:filepath) { File.join(Dir.tmpdir, 'symlink') } let(:filepath) { File.join(Dir.tmpdir, 'symlink') }
let(:error_message) { 'Archive path is a symlink' } let(:error_message) { 'Archive path is a symlink or hard link' }
before do before do
FileUtils.ln_s(filepath, filepath, force: true) FileUtils.ln_s(filepath, filepath, force: true)
@ -132,6 +132,19 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
end end
end end
context 'when archive path shares multiple hard links' do
let(:filesize) { 32 }
let(:error_message) { 'Archive path is a symlink or hard link' }
before do
FileUtils.link(filepath, File.join(Dir.mktmpdir, 'hard_link'))
end
it 'returns false' do
expect(subject).not_to be_valid
end
end
context 'when archive path is not a file' do context 'when archive path is not a file' do
let(:filepath) { Dir.mktmpdir } let(:filepath) { Dir.mktmpdir }
let(:filesize) { File.size(filepath) } let(:filesize) { File.size(filepath) }

View file

@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Gitlab::ImportExport::FileImporter do RSpec.describe Gitlab::ImportExport::FileImporter, feature_category: :importers do
include ExportFileHelper include ExportFileHelper
let(:shared) { Gitlab::ImportExport::Shared.new(nil) } let(:shared) { Gitlab::ImportExport::Shared.new(nil) }
@ -113,28 +113,73 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
end end
context 'error' do context 'error' do
subject(:import) { described_class.import(importable: build(:project), archive_file: '', shared: shared) }
before do before do
allow_next_instance_of(described_class) do |instance| allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:wait_for_archived_file).and_raise(StandardError) allow(instance).to receive(:wait_for_archived_file).and_raise(StandardError, 'foo')
end end
described_class.import(importable: build(:project), archive_file: '', shared: shared)
end end
it 'removes symlinks in root folder' do it 'removes symlinks in root folder' do
import
expect(File.exist?(symlink_file)).to be false expect(File.exist?(symlink_file)).to be false
end end
it 'removes hidden symlinks in root folder' do it 'removes hidden symlinks in root folder' do
import
expect(File.exist?(hidden_symlink_file)).to be false expect(File.exist?(hidden_symlink_file)).to be false
end end
it 'removes symlinks in subfolders' do it 'removes symlinks in subfolders' do
import
expect(File.exist?(subfolder_symlink_file)).to be false expect(File.exist?(subfolder_symlink_file)).to be false
end end
it 'does not remove a valid file' do it 'does not remove a valid file' do
import
expect(File.exist?(valid_file)).to be true expect(File.exist?(valid_file)).to be true
end end
it 'returns false and sets an error on shared' do
result = import
expect(result).to eq(false)
expect(shared.errors.join).to eq('foo')
end
context 'when files in the archive share hard links' do
let(:hard_link_file) { "#{shared.export_path}/hard_link_file.txt" }
before do
FileUtils.link(valid_file, hard_link_file)
end
it 'returns false and sets an error on shared' do
result = import
expect(result).to eq(false)
expect(shared.errors.join).to eq('File shares hard link')
end
it 'removes all files in export path' do
expect(Dir).to exist(shared.export_path)
expect(File).to exist(symlink_file)
expect(File).to exist(hard_link_file)
expect(File).to exist(valid_file)
import
expect(File).not_to exist(symlink_file)
expect(File).not_to exist(hard_link_file)
expect(File).not_to exist(valid_file)
expect(Dir).not_to exist(shared.export_path)
end
end
end end
context 'when file exceeds acceptable decompressed size' do context 'when file exceeds acceptable decompressed size' do
@ -157,8 +202,10 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
allow(Gitlab::ImportExport::DecompressedArchiveSizeValidator).to receive(:max_bytes).and_return(1) allow(Gitlab::ImportExport::DecompressedArchiveSizeValidator).to receive(:max_bytes).and_return(1)
end end
it 'returns false' do it 'returns false and sets an error on shared' do
expect(subject.import).to eq(false) result = subject.import
expect(result).to eq(false)
expect(shared.errors.join).to eq('Decompressed archive size validation failed.') expect(shared.errors.join).to eq('Decompressed archive size validation failed.')
end end
end end

View file

@ -35,16 +35,22 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonReader, feature_category: :impo
expect(subject).to eq(root_tree) expect(subject).to eq(root_tree)
end end
context 'when project.json is symlink' do context 'when project.json is symlink or hard link' do
it 'raises error an error' do using RSpec::Parameterized::TableSyntax
Dir.mktmpdir do |tmpdir|
FileUtils.touch(File.join(tmpdir, 'passwd'))
File.symlink(File.join(tmpdir, 'passwd'), File.join(tmpdir, 'project.json'))
ndjson_reader = described_class.new(tmpdir) where(:link_method) { [:link, :symlink] }
expect { ndjson_reader.consume_attributes(importable_path) } with_them do
.to raise_error(Gitlab::ImportExport::Error, 'Invalid file') it 'raises an error' do
Dir.mktmpdir do |tmpdir|
FileUtils.touch(File.join(tmpdir, 'passwd'))
FileUtils.send(link_method, File.join(tmpdir, 'passwd'), File.join(tmpdir, 'project.json'))
ndjson_reader = described_class.new(tmpdir)
expect { ndjson_reader.consume_attributes(importable_path) }
.to raise_error(Gitlab::ImportExport::Error, 'Invalid file')
end
end end
end end
end end
@ -97,18 +103,24 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonReader, feature_category: :impo
end end
end end
context 'when relation file is a symlink' do context 'when relation file is a symlink or hard link' do
it 'yields nothing to the Enumerator' do using RSpec::Parameterized::TableSyntax
Dir.mktmpdir do |tmpdir|
Dir.mkdir(File.join(tmpdir, 'project'))
File.write(File.join(tmpdir, 'passwd'), "{}\n{}")
File.symlink(File.join(tmpdir, 'passwd'), File.join(tmpdir, 'project', 'issues.ndjson'))
ndjson_reader = described_class.new(tmpdir) where(:link_method) { [:link, :symlink] }
result = ndjson_reader.consume_relation(importable_path, 'issues') with_them do
it 'yields nothing to the Enumerator' do
Dir.mktmpdir do |tmpdir|
Dir.mkdir(File.join(tmpdir, 'project'))
File.write(File.join(tmpdir, 'passwd'), "{}\n{}")
FileUtils.send(link_method, File.join(tmpdir, 'passwd'), File.join(tmpdir, 'project', 'issues.ndjson'))
expect(result.to_a).to eq([]) ndjson_reader = described_class.new(tmpdir)
result = ndjson_reader.consume_relation(importable_path, 'issues')
expect(result.to_a).to eq([])
end
end end
end end
end end

View file

@ -4,15 +4,17 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::RecursiveMergeFolders do RSpec.describe Gitlab::ImportExport::RecursiveMergeFolders do
describe '.merge' do describe '.merge' do
it 'merge folder and ignore symlinks' do it 'merges folder and ignores symlinks and files that share hard links' do
Dir.mktmpdir do |tmpdir| Dir.mktmpdir do |tmpdir|
source = "#{tmpdir}/source" source = "#{tmpdir}/source"
FileUtils.mkdir_p("#{source}/folder/folder") FileUtils.mkdir_p("#{source}/folder/folder")
FileUtils.touch("#{source}/file1.txt") FileUtils.touch("#{source}/file1.txt")
FileUtils.touch("#{source}/file_that_shares_hard_links.txt")
FileUtils.touch("#{source}/folder/file2.txt") FileUtils.touch("#{source}/folder/file2.txt")
FileUtils.touch("#{source}/folder/folder/file3.txt") FileUtils.touch("#{source}/folder/folder/file3.txt")
FileUtils.ln_s("#{source}/file1.txt", "#{source}/symlink-file1.txt") FileUtils.ln_s("#{source}/file1.txt", "#{source}/symlink-file1.txt")
FileUtils.ln_s("#{source}/folder", "#{source}/symlink-folder") FileUtils.ln_s("#{source}/folder", "#{source}/symlink-folder")
FileUtils.link("#{source}/file_that_shares_hard_links.txt", "#{source}/hard_link.txt")
target = "#{tmpdir}/target" target = "#{tmpdir}/target"
FileUtils.mkdir_p("#{target}/folder/folder") FileUtils.mkdir_p("#{target}/folder/folder")

View file

@ -9,6 +9,10 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
project.update_pages_deployment!(create(:pages_deployment, project: project)) project.update_pages_deployment!(create(:pages_deployment, project: project))
end end
before do
stub_pages_setting(host: 'example.com')
end
it 'returns nil when host is empty' do it 'returns nil when host is empty' do
expect(described_class.new(nil).execute).to be_nil expect(described_class.new(nil).execute).to be_nil
expect(described_class.new('').execute).to be_nil expect(described_class.new('').execute).to be_nil
@ -69,7 +73,7 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
end end
it 'returns the virtual domain with no lookup_paths' do it 'returns the virtual domain with no lookup_paths' do
virtual_domain = described_class.new("#{project.namespace.path}.#{Settings.pages.host}").execute virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/) expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
@ -82,7 +86,7 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
end end
it 'returns the virtual domain with no lookup_paths' do it 'returns the virtual domain with no lookup_paths' do
virtual_domain = described_class.new("#{project.namespace.path}.#{Settings.pages.host}".downcase).execute virtual_domain = described_class.new("#{project.namespace.path}.example.com".downcase).execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to be_nil expect(virtual_domain.cache_key).to be_nil
@ -104,7 +108,7 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
end end
it 'returns the virtual domain when there are pages deployed for the project' do it 'returns the virtual domain when there are pages deployed for the project' do
virtual_domain = described_class.new("#{project.namespace.path}.#{Settings.pages.host}").execute virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/) expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
@ -113,7 +117,7 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
end end
it 'finds domain with case-insensitive' do it 'finds domain with case-insensitive' do
virtual_domain = described_class.new("#{project.namespace.path}.#{Settings.pages.host.upcase}").execute virtual_domain = described_class.new("#{project.namespace.path}.Example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/) expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
@ -127,7 +131,7 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
end end
it 'returns the virtual domain when there are pages deployed for the project' do it 'returns the virtual domain when there are pages deployed for the project' do
virtual_domain = described_class.new("#{project.namespace.path}.#{Settings.pages.host}").execute virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to be_nil expect(virtual_domain.cache_key).to be_nil
@ -143,7 +147,7 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
project.project_setting.update!(pages_unique_domain: 'unique-domain') project.project_setting.update!(pages_unique_domain: 'unique-domain')
end end
subject(:virtual_domain) { described_class.new("unique-domain.#{Settings.pages.host.upcase}").execute } subject(:virtual_domain) { described_class.new('unique-domain.example.com').execute }
context 'when pages unique domain is enabled' do context 'when pages unique domain is enabled' do
before_all do before_all do
@ -171,6 +175,19 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id) expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end end
context 'when a project path conflicts with a unique domain' do
it 'prioritizes the unique domain project' do
group = create(:group, path: 'unique-domain')
other_project = build(:project, path: 'unique-domain.example.com', group: group)
other_project.save!(validate: false)
other_project.update_pages_deployment!(create(:pages_deployment, project: other_project))
other_project.mark_pages_as_deployed
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
end
context 'when :cache_pages_domain_api is disabled' do context 'when :cache_pages_domain_api is disabled' do
before do before do
stub_feature_flags(cache_pages_domain_api: false) stub_feature_flags(cache_pages_domain_api: false)
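The added conflict case fixes the lookup priority when one project's path collides with another project's unique Pages domain. A condensed sketch of the order these specs imply (method names are illustrative):

```ruby
# Resolve a host under the Pages root domain: the unique-domain project wins
# over any project that merely has "unique-domain.example.com" as its path.
def execute(host)
  find_by_unique_domain(host) || find_by_namespace_domain(host)
end
```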

View file

@ -0,0 +1,59 @@
# frozen_string_literal: true
require "spec_helper"
RSpec.describe Gitlab::Plantuml, feature_category: :shared do
describe ".configure" do
subject { described_class.configure }
let(:plantuml_url) { "http://plantuml.foo.bar" }
before do
allow(Gitlab::CurrentSettings).to receive(:plantuml_url).and_return(plantuml_url)
end
context "when PlantUML is enabled" do
before do
allow(Gitlab::CurrentSettings).to receive(:plantuml_enabled).and_return(true)
end
it "configures the endpoint URL" do
expect(subject.url).to eq(plantuml_url)
end
it "enables PNG support" do
expect(subject.png_enable).to be_truthy
end
it "disables SVG support" do
expect(subject.svg_enable).to be_falsey
end
it "disables TXT support" do
expect(subject.txt_enable).to be_falsey
end
end
context "when PlantUML is disabled" do
before do
allow(Gitlab::CurrentSettings).to receive(:plantuml_enabled).and_return(false)
end
it "configures the endpoint URL" do
expect(subject.url).to eq(plantuml_url)
end
it "enables PNG support" do
expect(subject.png_enable).to be_falsey
end
it "disables SVG support" do
expect(subject.svg_enable).to be_falsey
end
it "disables TXT support" do
expect(subject.txt_enable).to be_falsey
end
end
end
end
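These are the specs for the relocated PlantUML setup in `Gitlab::Plantuml.configure`. A minimal sketch of what that method plausibly does, assuming the asciidoctor-plantuml gem's configuration block (PNG tracks the instance setting, SVG and TXT stay off, the URL is always applied):

```ruby
module Gitlab
  module Plantuml
    def self.configure
      Asciidoctor::PlantUml.configure do |conf|
        conf.url        = Gitlab::CurrentSettings.plantuml_url
        conf.png_enable = Gitlab::CurrentSettings.plantuml_enabled
        conf.svg_enable = false
        conf.txt_enable = false
      end
    end
  end
end
```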

View file

@ -0,0 +1,88 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Utils::FileInfo, feature_category: :shared do
let(:tmpdir) { Dir.mktmpdir }
let(:file_path) { "#{tmpdir}/test.txt" }
before do
FileUtils.touch(file_path)
end
after do
FileUtils.rm_rf(tmpdir)
end
describe '.linked?' do
it 'raises an error when file does not exist' do
expect { subject.linked?('foo') }.to raise_error(Errno::ENOENT)
end
shared_examples 'identifies a linked file' do
it 'returns false when file or dir is not a link' do
expect(subject.linked?(tmpdir)).to eq(false)
expect(subject.linked?(file)).to eq(false)
end
it 'returns true when file or dir is symlinked' do
FileUtils.symlink(tmpdir, "#{tmpdir}/symlinked_dir")
FileUtils.symlink(file_path, "#{tmpdir}/symlinked_file.txt")
expect(subject.linked?("#{tmpdir}/symlinked_dir")).to eq(true)
expect(subject.linked?("#{tmpdir}/symlinked_file.txt")).to eq(true)
end
it 'returns true when file has more than one hard link' do
FileUtils.link(file_path, "#{tmpdir}/hardlinked_file.txt")
expect(subject.linked?(file)).to eq(true)
expect(subject.linked?("#{tmpdir}/hardlinked_file.txt")).to eq(true)
end
end
context 'when file is a File::Stat' do
let(:file) { File.lstat(file_path) }
it_behaves_like 'identifies a linked file'
end
context 'when file is path' do
let(:file) { file_path }
it_behaves_like 'identifies a linked file'
end
end
describe '.shares_hard_link?' do
it 'raises an error when file does not exist' do
expect { subject.shares_hard_link?('foo') }.to raise_error(Errno::ENOENT)
end
shared_examples 'identifies a file that shares a hard link' do
it 'returns false when file or dir does not share hard links' do
expect(subject.shares_hard_link?(tmpdir)).to eq(false)
expect(subject.shares_hard_link?(file)).to eq(false)
end
it 'returns true when file has more than one hard link' do
FileUtils.link(file_path, "#{tmpdir}/hardlinked_file.txt")
expect(subject.shares_hard_link?(file)).to eq(true)
expect(subject.shares_hard_link?("#{tmpdir}/hardlinked_file.txt")).to eq(true)
end
end
context 'when file is a File::Stat' do
let(:file) { File.lstat(file_path) }
it_behaves_like 'identifies a file that shares a hard link'
end
context 'when file is path' do
let(:file) { file_path }
it_behaves_like 'identifies a file that shares a hard link'
end
end
end
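A plausible implementation consistent with these examples (an assumption, not necessarily the shipped code): both helpers accept a path or a `File::Stat`, `linked?` covers symlinks as well as shared hard links, and a missing path raises `Errno::ENOENT` via `File.lstat`.

```ruby
module Gitlab
  module Utils
    module FileInfo
      def self.linked?(file)
        stat = to_stat(file)
        stat.symlink? || shares_hard_link?(stat)
      end

      def self.shares_hard_link?(file)
        stat = to_stat(file)
        stat.file? && stat.nlink > 1
      end

      def self.to_stat(file)
        file.is_a?(File::Stat) ? file : File.lstat(file)
      end
    end
  end
end
```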

View file

@ -25,7 +25,7 @@ RSpec.describe JSONWebToken::HMACToken do
end end
describe '.decode' do describe '.decode' do
let(:leeway) { described_class::IAT_LEEWAY } let(:leeway) { described_class::LEEWAY }
let(:decoded_token) { described_class.decode(encoded_token, secret, leeway: leeway) } let(:decoded_token) { described_class.decode(encoded_token, secret, leeway: leeway) }
context 'with an invalid token' do context 'with an invalid token' do
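The only change here is the constant name: the decode leeway is now read from `LEEWAY` instead of `IAT_LEEWAY`. A usage sketch; only the constant and the `decode` keyword argument come from the spec, the rest is assumed.

```ruby
leeway = JSONWebToken::HMACToken::LEEWAY
payload, header = JSONWebToken::HMACToken.decode(encoded_token, secret, leeway: leeway)
```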

View file

@ -77,12 +77,36 @@ RSpec.describe ProjectSetting, type: :model, feature_category: :projects do
expect(project_setting).not_to be_valid expect(project_setting).not_to be_valid
expect(project_setting.errors.full_messages).to include("Pages unique domain has already been taken") expect(project_setting.errors.full_messages).to include("Pages unique domain has already been taken")
end end
it "validates if the pages_unique_domain already exist as a project path" do
stub_pages_setting(host: 'example.com')
create(:project, path: "random-unique-domain.example.com")
project_setting = build(:project_setting, pages_unique_domain: "random-unique-domain")
expect(project_setting).not_to be_valid
expect(project_setting.errors.full_messages_for(:pages_unique_domain))
.to match(["Pages unique domain already in use"])
end
context "when updating" do
it "validates if the pages_unique_domain already exist as a project path" do
stub_pages_setting(host: 'example.com')
project_setting = create(:project_setting)
create(:project, path: "random-unique-domain.example.com")
expect(project_setting.update(pages_unique_domain: "random-unique-domain")).to eq(false)
expect(project_setting.errors.full_messages_for(:pages_unique_domain))
.to match(["Pages unique domain already in use"])
end
end
end end
describe 'target_platforms=' do describe 'target_platforms=' do
it 'stringifies and sorts' do it 'stringifies and sorts' do
project_setting = build(:project_setting, target_platforms: [:watchos, :ios]) project_setting = build(:project_setting, target_platforms: [:watchos, :ios])
expect(project_setting.target_platforms).to eq %w(ios watchos) expect(project_setting.target_platforms).to eq %w[ios watchos]
end end
end end

View file

@ -830,6 +830,37 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
expect(project).to be_valid expect(project).to be_valid
end end
context 'when validating if path already exist as pages unique domain' do
before do
stub_pages_setting(host: 'example.com')
end
it 'rejects paths that match pages unique domain' do
create(:project_setting, pages_unique_domain: 'some-unique-domain')
project = build(:project, path: 'some-unique-domain.example.com')
expect(project).not_to be_valid
expect(project.errors.full_messages_for(:path)).to match(['Path already in use'])
end
it 'accepts path when the host does not match' do
create(:project_setting, pages_unique_domain: 'some-unique-domain')
project = build(:project, path: 'some-unique-domain.another-example.com')
expect(project).to be_valid
end
it 'accepts path when the domain does not match' do
create(:project_setting, pages_unique_domain: 'another-unique-domain')
project = build(:project, path: 'some-unique-domain.example.com')
expect(project).to be_valid
end
end
context 'path is unchanged' do context 'path is unchanged' do
let_it_be(:invalid_path_project) do let_it_be(:invalid_path_project) do
project = create(:project, :repository, :public) project = create(:project, :repository, :public)
@ -4825,6 +4856,33 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL) project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
end end
context 'when validating if path already exist as pages unique domain' do
before do
stub_pages_setting(host: 'example.com')
end
it 'rejects paths that match pages unique domain' do
stub_pages_setting(host: 'example.com')
create(:project_setting, pages_unique_domain: 'some-unique-domain')
expect(project.update(path: 'some-unique-domain.example.com')).to eq(false)
expect(project.errors.full_messages_for(:path)).to match(['Path already in use'])
end
it 'accepts path when the host does not match' do
create(:project_setting, pages_unique_domain: 'some-unique-domain')
expect(project.update(path: 'some-unique-domain.another-example.com')).to eq(true)
end
it 'accepts path when the domain does not match' do
stub_pages_setting(host: 'example.com')
create(:project_setting, pages_unique_domain: 'another-unique-domain')
expect(project.update(path: 'some-unique-domain.example.com')).to eq(true)
end
end
it 'does not validate the visibility' do it 'does not validate the visibility' do
expect(project).not_to receive(:visibility_level_allowed_as_fork).and_call_original expect(project).not_to receive(:visibility_level_allowed_as_fork).and_call_original
expect(project).not_to receive(:visibility_level_allowed_by_group).and_call_original expect(project).not_to receive(:visibility_level_allowed_by_group).and_call_original
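The two model specs validate the same collision in opposite directions: a project path may not equal `<existing unique domain>.<pages host>`, and a unique domain may not equal an existing path with the host stripped. A hedged sketch of one direction (the method name and query are illustrative):

```ruby
def path_conflicts_with_pages_unique_domain?(path)
  host = Settings.pages.host
  return false unless path.end_with?(".#{host}")

  candidate = path.delete_suffix(".#{host}")
  ProjectSetting.where(pages_unique_domain: candidate).exists?
end
```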

View file

@ -2,10 +2,13 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Ci::PipelineSchedulePolicy, :models, :clean_gitlab_redis_cache do RSpec.describe Ci::PipelineSchedulePolicy, :models, :clean_gitlab_redis_cache, feature_category: :continuous_integration do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) } let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) } let_it_be_with_reload(:project) { create(:project, :repository, create_tag: tag_ref_name) }
let_it_be(:pipeline_schedule, reload: true) { create(:ci_pipeline_schedule, :nightly, project: project) } let_it_be_with_reload(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
let_it_be(:tag_ref_name) { "v1.0.0" }
let(:policy) do let(:policy) do
described_class.new(user, pipeline_schedule) described_class.new(user, pipeline_schedule)
@ -13,51 +16,143 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models, :clean_gitlab_redis_cache do
   describe 'rules' do
     describe 'rules for protected ref' do
-      before do
-        project.add_developer(user)
-      end
-
-      context 'when no one can push or merge to the branch' do
-        before do
-          create(:protected_branch, :no_one_can_push, name: pipeline_schedule.ref, project: project)
-        end
-
-        it 'does not include ability to play pipeline schedule' do
-          expect(policy).to be_disallowed :play_pipeline_schedule
-        end
-      end
-
-      context 'when developers can push to the branch' do
-        before do
-          create(:protected_branch, :developers_can_merge, name: pipeline_schedule.ref, project: project)
-        end
-
-        it 'includes ability to update pipeline' do
-          expect(policy).to be_allowed :play_pipeline_schedule
-        end
-      end
-
-      context 'when no one can create the tag' do
-        let(:tag) { 'v1.0.0' }
-
-        before do
-          pipeline_schedule.update!(ref: tag)
-
-          create(:protected_tag, :no_one_can_create, name: pipeline_schedule.ref, project: project)
-        end
-
-        it 'does not include ability to play pipeline schedule' do
-          expect(policy).to be_disallowed :play_pipeline_schedule
-        end
-      end
-
-      context 'when no one can create the tag but it is not a tag' do
-        before do
-          create(:protected_tag, :no_one_can_create, name: pipeline_schedule.ref, project: project)
-        end
-
-        it 'includes ability to play pipeline schedule' do
-          expect(policy).to be_allowed :play_pipeline_schedule
-        end
-      end
+      context 'for branch' do
+        %w[refs/heads/master master].each do |branch_ref|
+          context "with #{branch_ref}" do
+            let_it_be(:branch_ref_name) { "master" }
+
+            let_it_be(:branch_pipeline_schedule) do
+              create(:ci_pipeline_schedule, :nightly, project: project, ref: branch_ref)
+            end
+
+            where(:push_access_level, :merge_access_level, :project_role, :accessible) do
+              :no_one_can_push | :no_one_can_merge | :owner | :be_disallowed
+              :no_one_can_push | :no_one_can_merge | :maintainer | :be_disallowed
+              :no_one_can_push | :no_one_can_merge | :developer | :be_disallowed
+              :no_one_can_push | :no_one_can_merge | :reporter | :be_disallowed
+              :no_one_can_push | :no_one_can_merge | :guest | :be_disallowed
+              :maintainers_can_push | :no_one_can_merge | :owner | :be_allowed
+              :maintainers_can_push | :no_one_can_merge | :maintainer | :be_allowed
+              :maintainers_can_push | :no_one_can_merge | :developer | :be_disallowed
+              :maintainers_can_push | :no_one_can_merge | :reporter | :be_disallowed
+              :maintainers_can_push | :no_one_can_merge | :guest | :be_disallowed
+              :developers_can_push | :no_one_can_merge | :owner | :be_allowed
+              :developers_can_push | :no_one_can_merge | :maintainer | :be_allowed
+              :developers_can_push | :no_one_can_merge | :developer | :be_allowed
+              :developers_can_push | :no_one_can_merge | :reporter | :be_disallowed
+              :developers_can_push | :no_one_can_merge | :guest | :be_disallowed
+              :no_one_can_push | :maintainers_can_merge | :owner | :be_allowed
+              :no_one_can_push | :maintainers_can_merge | :maintainer | :be_allowed
+              :no_one_can_push | :maintainers_can_merge | :developer | :be_disallowed
+              :no_one_can_push | :maintainers_can_merge | :reporter | :be_disallowed
+              :no_one_can_push | :maintainers_can_merge | :guest | :be_disallowed
+              :maintainers_can_push | :maintainers_can_merge | :owner | :be_allowed
+              :maintainers_can_push | :maintainers_can_merge | :maintainer | :be_allowed
+              :maintainers_can_push | :maintainers_can_merge | :developer | :be_disallowed
+              :maintainers_can_push | :maintainers_can_merge | :reporter | :be_disallowed
+              :maintainers_can_push | :maintainers_can_merge | :guest | :be_disallowed
+              :developers_can_push | :maintainers_can_merge | :owner | :be_allowed
+              :developers_can_push | :maintainers_can_merge | :maintainer | :be_allowed
+              :developers_can_push | :maintainers_can_merge | :developer | :be_allowed
+              :developers_can_push | :maintainers_can_merge | :reporter | :be_disallowed
+              :developers_can_push | :maintainers_can_merge | :guest | :be_disallowed
+              :no_one_can_push | :developers_can_merge | :owner | :be_allowed
+              :no_one_can_push | :developers_can_merge | :maintainer | :be_allowed
+              :no_one_can_push | :developers_can_merge | :developer | :be_allowed
+              :no_one_can_push | :developers_can_merge | :reporter | :be_disallowed
+              :no_one_can_push | :developers_can_merge | :guest | :be_disallowed
+              :maintainers_can_push | :developers_can_merge | :owner | :be_allowed
+              :maintainers_can_push | :developers_can_merge | :maintainer | :be_allowed
+              :maintainers_can_push | :developers_can_merge | :developer | :be_allowed
+              :maintainers_can_push | :developers_can_merge | :reporter | :be_disallowed
+              :maintainers_can_push | :developers_can_merge | :guest | :be_disallowed
+              :developers_can_push | :developers_can_merge | :owner | :be_allowed
+              :developers_can_push | :developers_can_merge | :maintainer | :be_allowed
+              :developers_can_push | :developers_can_merge | :developer | :be_allowed
+              :developers_can_push | :developers_can_merge | :reporter | :be_disallowed
+              :developers_can_push | :developers_can_merge | :guest | :be_disallowed
+            end
+
+            with_them do
+              before do
+                create(:protected_branch, push_access_level, merge_access_level, name: branch_ref_name,
+                  project: project)
+                project.add_role(user, project_role)
+              end
+
+              context 'for create_pipeline_schedule' do
+                subject(:policy) { described_class.new(user, new_branch_pipeline_schedule) }
+
+                let(:new_branch_pipeline_schedule) { project.pipeline_schedules.new(ref: branch_ref) }
+
+                it { expect(policy).to try(accessible, :create_pipeline_schedule) }
+              end
+
+              context 'for play_pipeline_schedule' do
+                subject(:policy) { described_class.new(user, branch_pipeline_schedule) }
+
+                it { expect(policy).to try(accessible, :play_pipeline_schedule) }
+              end
+            end
+          end
+        end
+      end
+
+      context 'for tag' do
+        %w[refs/tags/v1.0.0 v1.0.0].each do |tag_ref|
+          context "with #{tag_ref}" do
+            let_it_be(:tag_ref_name) { "v1.0.0" }
+
+            let_it_be(:tag_pipeline_schedule) do
+              create(:ci_pipeline_schedule, :nightly, project: project, ref: tag_ref)
+            end
+
+            where(:access_level, :project_role, :accessible) do
+              :no_one_can_create | :owner | :be_disallowed
+              :no_one_can_create | :maintainer | :be_disallowed
+              :no_one_can_create | :developer | :be_disallowed
+              :no_one_can_create | :reporter | :be_disallowed
+              :no_one_can_create | :guest | :be_disallowed
+              :maintainers_can_create | :owner | :be_allowed
+              :maintainers_can_create | :maintainer | :be_allowed
+              :maintainers_can_create | :developer | :be_disallowed
+              :maintainers_can_create | :reporter | :be_disallowed
+              :maintainers_can_create | :guest | :be_disallowed
+              :developers_can_create | :owner | :be_allowed
+              :developers_can_create | :maintainer | :be_allowed
+              :developers_can_create | :developer | :be_allowed
+              :developers_can_create | :reporter | :be_disallowed
+              :developers_can_create | :guest | :be_disallowed
+            end
+
+            with_them do
+              before do
+                create(:protected_tag, access_level, name: tag_ref_name, project: project)
+                project.add_role(user, project_role)
+              end
+
+              context 'for create_pipeline_schedule' do
+                subject(:policy) { described_class.new(user, new_tag_pipeline_schedule) }
+
+                let(:new_tag_pipeline_schedule) { project.pipeline_schedules.new(ref: tag_ref) }
+
+                it { expect(policy).to try(accessible, :create_pipeline_schedule) }
+              end
+
+              context 'for play_pipeline_schedule' do
+                subject(:policy) { described_class.new(user, tag_pipeline_schedule) }
+
+                it { expect(policy).to try(accessible, :play_pipeline_schedule) }
+              end
+            end
+          end
+        end
+      end
     end
   end
 end
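
Note on the pattern in the rewritten spec above: one example body is driven from a permission matrix via RSpec::Parameterized::TableSyntax, with `try` resolving each row's :be_allowed / :be_disallowed symbol into the matching matcher. A self-contained sketch of that pattern follows; WidgetPolicy, :read_widget and the two matcher definitions are made-up stand-ins, not GitLab code.

# widget_policy_spec.rb -- run with `rspec widget_policy_spec.rb` after
# `gem install rspec rspec-parameterized activesupport`.
require 'rspec-parameterized'
require 'active_support/core_ext/object/try' # provides `try`, as used in the spec above

# Stand-in policy: only owners may do anything.
class WidgetPolicy
  def initialize(role)
    @role = role
  end

  def allowed?(_ability)
    @role == :owner
  end
end

# Minimal versions of the be_allowed / be_disallowed matchers.
RSpec::Matchers.define :be_allowed do |ability|
  match { |policy| policy.allowed?(ability) }
end

RSpec::Matchers.define :be_disallowed do |ability|
  match { |policy| !policy.allowed?(ability) }
end

RSpec.describe WidgetPolicy do
  using RSpec::Parameterized::TableSyntax

  # Each table row becomes its own example, as in the branch/tag tables above.
  where(:role, :accessible) do
    :owner | :be_allowed
    :guest | :be_disallowed
  end

  with_them do
    subject(:policy) { described_class.new(role) }

    # try(accessible, :read_widget) builds either matcher, so one example
    # body covers both the allowed and the disallowed rows.
    it { expect(policy).to try(accessible, :read_widget) }
  end
end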

View file

 @@ -50,6 +50,17 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
       expect(response).to have_gitlab_http_status(:ok)
     end

+    it 'authenticates using a jwt token with an IAT from 10 seconds in the future' do
+      headers =
+        travel_to(Time.now + 10.seconds) do
+          gitlab_shell_internal_api_request_header
+        end
+
+      perform_request(headers: headers)
+
+      expect(response).to have_gitlab_http_status(:ok)
+    end
+
     it 'returns 401 when jwt token is expired' do
       headers = gitlab_shell_internal_api_request_header
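
The added example exercises the 16.0.8 "Disable IAT verification by default" change: a token whose issued-at claim sits a few seconds in the future (clock skew between gitlab-shell and the API) should still authenticate. As a rough standalone illustration of tolerating a future `iat` with an explicit leeway, using the `jwt` gem directly (the secret and leeway values are arbitrary, and this is not GitLab's implementation):

# Sketch only: accept an "iat" up to IAT_LEEWAY seconds in the future instead
# of rejecting it outright.
require 'jwt'

SECRET = 'shared-secret'.freeze
IAT_LEEWAY = 60 # seconds of clock skew we are willing to tolerate

# Token minted by a peer whose clock runs 10 seconds ahead of ours.
token = JWT.encode({ 'iat' => Time.now.to_i + 10 }, SECRET, 'HS256')

# Signature check only; the iat claim is validated by hand below.
payload, _header = JWT.decode(token, SECRET, true, { algorithm: 'HS256' })

iat = payload['iat']
raise 'token issued too far in the future' if iat && iat > Time.now.to_i + IAT_LEEWAY

puts "accepted token issued at #{Time.at(iat)}"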

View file

 @@ -43,13 +43,21 @@ RSpec.describe BulkImports::ArchiveExtractionService, feature_category: :importe
     context 'when archive file is a symlink' do
       it 'raises an error' do
-        FileUtils.ln_s(File.join(tmpdir, filename), File.join(tmpdir, 'symlink'))
+        FileUtils.ln_s(filepath, File.join(tmpdir, 'symlink'))

         expect { described_class.new(tmpdir: tmpdir, filename: 'symlink').execute }
           .to raise_error(BulkImports::Error, 'Invalid file')
       end
     end

+    context 'when archive file shares multiple hard links' do
+      it 'raises an error' do
+        FileUtils.link(filepath, File.join(tmpdir, 'hard_link'))
+
+        expect { subject.execute }.to raise_error(BulkImports::Error, 'Invalid file')
+      end
+    end
+
     context 'when filepath is being traversed' do
       it 'raises an error' do
         expect { described_class.new(tmpdir: File.join(Dir.mktmpdir, 'test', '..'), filename: 'name').execute }
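
The new hard-link case lines up with the "Sanitize multiple hardlinks from import archives" fix. One plausible way to detect the condition the spec sets up, shown purely as an illustration (the service's real check may differ), is to inspect the file's link count:

# A regular file that is safe to process normally has exactly one directory
# entry; nlink > 1 means another name points at the same inode.
require 'fileutils'
require 'tmpdir'

def multi_hard_linked?(path)
  File.stat(path).nlink > 1
end

Dir.mktmpdir do |dir|
  original = File.join(dir, 'export.tar')
  FileUtils.touch(original)
  puts multi_hard_linked?(original) # => false

  FileUtils.link(original, File.join(dir, 'hard_link'))
  puts multi_hard_linked?(original) # => true, both names share one inode
end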

View file

 @@ -3,6 +3,8 @@
 require 'spec_helper'

 RSpec.describe BulkImports::FileDecompressionService, feature_category: :importers do
+  using RSpec::Parameterized::TableSyntax
+
   let_it_be(:tmpdir) { Dir.mktmpdir }
   let_it_be(:ndjson_filename) { 'labels.ndjson' }
   let_it_be(:ndjson_filepath) { File.join(tmpdir, ndjson_filename) }

 @@ -70,39 +72,68 @@ RSpec.describe BulkImports::FileDecompressionService, feature_category: :importe
       end
     end

-    context 'when compressed file is a symlink' do
-      let_it_be(:symlink) { File.join(tmpdir, 'symlink.gz') }
-
-      before do
-        FileUtils.ln_s(File.join(tmpdir, gz_filename), symlink)
-      end
-
-      subject { described_class.new(tmpdir: tmpdir, filename: 'symlink.gz') }
-
-      it 'raises an error and removes the file' do
-        expect { subject.execute }
-          .to raise_error(BulkImports::FileDecompressionService::ServiceError, 'File decompression error')
-
-        expect(File.exist?(symlink)).to eq(false)
-      end
-    end
-
-    context 'when decompressed file is a symlink' do
-      let_it_be(:symlink) { File.join(tmpdir, 'symlink') }
-
-      before do
-        FileUtils.ln_s(File.join(tmpdir, ndjson_filename), symlink)
-
-        subject.instance_variable_set(:@decompressed_filepath, symlink)
-      end
-
-      subject { described_class.new(tmpdir: tmpdir, filename: gz_filename) }
-
-      it 'raises an error and removes the file' do
-        expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid file')
-
-        expect(File.exist?(symlink)).to eq(false)
-      end
-    end
+    shared_examples 'raises an error and removes the file' do |error_message:|
+      specify do
+        expect { subject.execute }
+          .to raise_error(BulkImports::FileDecompressionService::ServiceError, error_message)
+
+        expect(File).not_to exist(file)
+      end
+    end
+
+    shared_context 'when compressed file' do
+      let_it_be(:file) { File.join(tmpdir, 'file.gz') }
+
+      subject { described_class.new(tmpdir: tmpdir, filename: 'file.gz') }
+
+      before do
+        FileUtils.send(link_method, File.join(tmpdir, gz_filename), file)
+      end
+    end
+
+    shared_context 'when decompressed file' do
+      let_it_be(:file) { File.join(tmpdir, 'file.txt') }
+
+      subject { described_class.new(tmpdir: tmpdir, filename: gz_filename) }
+
+      before do
+        original_file = File.join(tmpdir, 'original_file.txt')
+        FileUtils.touch(original_file)
+        FileUtils.send(link_method, original_file, file)
+
+        subject.instance_variable_set(:@decompressed_filepath, file)
+      end
+    end
+
+    context 'when compressed file is a symlink' do
+      let(:link_method) { :symlink }
+
+      include_context 'when compressed file'
+
+      include_examples 'raises an error and removes the file', error_message: 'File decompression error'
+    end
+
+    context 'when compressed file shares multiple hard links' do
+      let(:link_method) { :link }
+
+      include_context 'when compressed file'
+
+      include_examples 'raises an error and removes the file', error_message: 'File decompression error'
+    end
+
+    context 'when decompressed file is a symlink' do
+      let(:link_method) { :symlink }
+
+      include_context 'when decompressed file'
+
+      include_examples 'raises an error and removes the file', error_message: 'Invalid file'
+    end
+
+    context 'when decompressed file shares multiple hard links' do
+      let(:link_method) { :link }
+
+      include_context 'when decompressed file'
+
+      include_examples 'raises an error and removes the file', error_message: 'Invalid file'
+    end
   end
 end
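
The refactor above collapses the symlink and hard-link cases into one parameterised shared example plus shared contexts. A self-contained sketch of that shape follows; the Validator class is a made-up stand-in for the decompression service, and the error message is passed positionally here for brevity:

# validator_spec.rb -- run with `ruby validator_spec.rb` (needs the rspec gem).
require 'rspec/autorun'
require 'fileutils'
require 'tmpdir'

# Stand-in for the service under test: rejects symlinks and extra hard links.
class Validator
  Error = Class.new(StandardError)

  def initialize(path)
    @path = path
  end

  def execute
    stat = File.lstat(@path)
    raise Error, 'Invalid file' if stat.symlink? || stat.nlink > 1
  end
end

RSpec.describe Validator do
  let(:tmpdir) { Dir.mktmpdir }
  let(:target) { File.join(tmpdir, 'target').tap { |path| FileUtils.touch(path) } }
  let(:file)   { File.join(tmpdir, 'file') }

  subject { described_class.new(file) }

  shared_examples 'rejects the file' do |error_message|
    specify do
      expect { subject.execute }.to raise_error(described_class::Error, error_message)
    end
  end

  context 'when the file is a symlink' do
    before { FileUtils.ln_s(target, file) }

    include_examples 'rejects the file', 'Invalid file'
  end

  context 'when the file shares multiple hard links' do
    before { FileUtils.link(target, file) }

    include_examples 'rejects the file', 'Invalid file'
  end
end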

View file

 @@ -10,7 +10,7 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
   let_it_be(:content_type) { 'application/octet-stream' }
   let_it_be(:content_disposition) { nil }
   let_it_be(:filename) { 'file_download_service_spec' }
-  let_it_be(:tmpdir) { Dir.tmpdir }
+  let_it_be(:tmpdir) { Dir.mktmpdir }
   let_it_be(:filepath) { File.join(tmpdir, filename) }
   let_it_be(:content_length) { 1000 }

 @@ -247,6 +247,36 @@
       end
     end

+    context 'when file shares multiple hard links' do
+      let_it_be(:hard_link) { File.join(tmpdir, 'hard_link') }
+
+      before do
+        existing_file = File.join(Dir.mktmpdir, filename)
+        FileUtils.touch(existing_file)
+        FileUtils.link(existing_file, hard_link)
+      end
+
+      subject do
+        described_class.new(
+          configuration: config,
+          relative_url: '/test',
+          tmpdir: tmpdir,
+          filename: 'hard_link',
+          file_size_limit: file_size_limit,
+          allowed_content_types: allowed_content_types
+        )
+      end
+
+      it 'raises an error and removes the file' do
+        expect { subject.execute }.to raise_error(
+          described_class::ServiceError,
+          'Invalid downloaded file'
+        )
+
+        expect(File.exist?(hard_link)).to eq(false)
+      end
+    end
+
     context 'when dir is not in tmpdir' do
       subject do
         described_class.new(
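
Besides the new hard-link case, this spec now uses Dir.mktmpdir rather than Dir.tmpdir, so every run writes into a private scratch directory instead of the shared system temp directory. A small illustration (the printed paths are examples only):

require 'tmpdir'

puts Dir.tmpdir      # e.g. "/tmp", one directory shared by every process
puts Dir.mktmpdir    # e.g. "/tmp/d20230801-1234-xyz", freshly created and unique

# Block form removes the directory (and anything like 'hard_link' in it) afterwards.
Dir.mktmpdir do |dir|
  File.write(File.join(dir, 'hard_link'), 'scratch data')
end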

View file

 @@ -6,7 +6,9 @@ RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuo
   let_it_be(:user) { create(:user) }
   let_it_be(:reporter) { create(:user) }
   let_it_be(:project) { create(:project, :public, :repository) }
-  let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+  let_it_be(:pipeline_schedule) do
+    create(:ci_pipeline_schedule, project: project, owner: user, ref: 'master')
+  end

   before_all do
     project.add_maintainer(user)

View file

 @@ -1134,10 +1134,10 @@
   resolved "https://registry.yarnpkg.com/@gitlab/visual-review-tools/-/visual-review-tools-1.7.3.tgz#9ea641146436da388ffbad25d7f2abe0df52c235"
   integrity sha512-NMV++7Ew1FSBDN1xiZaauU9tfeSfgDHcOLpn+8bGpP+O5orUPm2Eu66R5eC5gkjBPaXosNAxNWtriee+aFk4+g==

-"@gitlab/web-ide@0.0.1-dev-20230511143809":
-  version "0.0.1-dev-20230511143809"
-  resolved "https://registry.yarnpkg.com/@gitlab/web-ide/-/web-ide-0.0.1-dev-20230511143809.tgz#c13dfb4d1edab2e020d4a102d4ec18048917490f"
-  integrity sha512-caP5WSaTuIhPrPGUWyvPT4np6swkKQHM1Pa9HiBnGhiOhhQ1+3X/+J9EoZXUhnhwiBzS7sp32Uyttam4am/sTA==
+"@gitlab/web-ide@0.0.1-dev-20230713160749-patch-1":
+  version "0.0.1-dev-20230713160749-patch-1"
+  resolved "https://registry.yarnpkg.com/@gitlab/web-ide/-/web-ide-0.0.1-dev-20230713160749-patch-1.tgz#6420b55aae444533f9a4bd6269503d98a72aaa2e"
+  integrity sha512-Dh8XQyPwDY6fkd/A+hTHCqrD23u5qnlaxKu5myyxDEgBNGgu4SGblFU9B6NHNm8eGUZk6Cs5MuMk+NUvWRKbmA==

 "@graphql-eslint/eslint-plugin@3.18.0":
   version "3.18.0"