New upstream version 11.5.10+dfsg

This commit is contained in:
Pirate Praveen 2019-02-02 18:00:53 +05:30
parent 86ace60a9c
commit b16c5f7852
148 changed files with 2472 additions and 424 deletions

View file

@ -2,6 +2,45 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 11.5.10 (2019-01-30)
- No changes.
## 11.5.9 (2019-01-29)
### Security (21 changes)
- Make potentially malicious links more visible in the UI and scrub RTLO chars from links. !2770
- Don't process MR refs for guests in the notes. !2771
- Add more LFS validations to prevent forgery.
- Verify that LFS upload requests are genuine.
- Fixed XSS content in KaTeX links.
- Prevent awarding emojis to notes whose parent is not visible to user.
- Prevent unauthorized replies when discussion is locked or confidential.
- Disable git v2 protocol temporarily.
- Fix showing CI status for guest users when public pipelines are not set.
- Fix contributed projects info still visible when user enables private profile.
- Extract GitLab Pages using RubyZip.
- Disallows unauthorized users from accessing the pipelines section.
- Use common error for unauthenticated users when creating issues.
- Fix slow regex in project reference pattern.
- Fix private user email being visible in push (and tag push) webhooks.
- Fix wiki access rights when external wiki is enabled.
- Fix path disclosure on project import error.
- Restrict project import visibility based on its group.
- Expose CI/CD trigger token only to the trigger owner.
- Notify only users who can access the project on project move.
- Alias GitHub and BitBucket OAuth2 callback URLs.
### Fixed (1 change)
- Fix uninitialized constant with GitLab Pages.
## 11.5.8 (2019-01-28)
- Unreleased due to quality assurance failure.
## 11.5.7 (2019-01-15)
### Security (1 change)

View file

@ -1 +1 @@
0.129.0
0.129.1

View file

@ -1 +1 @@
7.1.3
7.1.4

View file

@ -66,6 +66,7 @@ gem 'u2f', '~> 0.2.1'
# GitLab Pages
gem 'validates_hostname', '~> 1.0.6'
gem 'rubyzip', '~> 1.2.2', require: 'zip'
# Browser detection
gem 'browser', '~> 2.5'

View file

@ -1107,6 +1107,7 @@ DEPENDENCIES
ruby-prof (~> 0.17.0)
ruby-progressbar
ruby_parser (~> 3.8)
rubyzip (~> 1.2.2)
rufus-scheduler (~> 3.4)
rugged (~> 0.27)
sanitize (~> 4.6)

View file

@ -1117,6 +1117,7 @@ DEPENDENCIES
ruby-prof (~> 0.17.0)
ruby-progressbar
ruby_parser (~> 3.8)
rubyzip (~> 1.2.2)
rufus-scheduler (~> 3.4)
rugged (~> 0.27)
sanitize (~> 4.6)

View file

@ -1 +1 @@
11.5.7
11.5.10

View file

@ -8,7 +8,7 @@ class Import::BitbucketController < Import::BaseController
rescue_from Bitbucket::Error::Unauthorized, with: :bitbucket_unauthorized
def callback
response = client.auth_code.get_token(params[:code], redirect_uri: callback_import_bitbucket_url)
response = client.auth_code.get_token(params[:code], redirect_uri: users_import_bitbucket_callback_url)
session[:bitbucket_token] = response.token
session[:bitbucket_expires_at] = response.expires_at
@ -89,7 +89,7 @@ class Import::BitbucketController < Import::BaseController
end
def go_to_bitbucket_for_permissions
redirect_to client.auth_code.authorize_url(redirect_uri: callback_import_bitbucket_url)
redirect_to client.auth_code.authorize_url(redirect_uri: users_import_bitbucket_callback_url)
end
def bitbucket_unauthorized

View file

@ -86,7 +86,7 @@ class Import::GithubController < Import::BaseController
end
def callback_import_url
public_send("callback_import_#{provider}_url", extra_import_params) # rubocop:disable GitlabSecurity/PublicSend
public_send("users_import_#{provider}_callback_url", extra_import_params) # rubocop:disable GitlabSecurity/PublicSend
end
def provider_unauthorized

View file

@ -19,7 +19,7 @@ class Projects::IssuesController < Projects::ApplicationController
prepend_before_action(only: [:index]) { authenticate_sessionless_user!(:rss) }
prepend_before_action(only: [:calendar]) { authenticate_sessionless_user!(:ics) }
prepend_before_action :authenticate_new_issue!, only: [:new]
prepend_before_action :authenticate_user!, only: [:new]
prepend_before_action :store_uri, only: [:new, :show]
before_action :whitelist_query_limiting, only: [:create, :create_merge_request, :move, :bulk_update]
@ -229,14 +229,6 @@ class Projects::IssuesController < Projects::ApplicationController
] + [{ label_ids: [], assignee_ids: [] }]
end
def authenticate_new_issue!
return if current_user
notice = "Please sign in to create the new issue."
redirect_to new_user_session_path, notice: notice
end
def store_uri
if request.get? && !request.xhr?
store_location_for :user, request.fullpath

View file

@ -5,7 +5,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
include WorkhorseRequest
include SendFileUpload
skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
skip_before_action :verify_workhorse_api!, only: :download
def download
lfs_object = LfsObject.find_by_oid(oid)

View file

@ -39,8 +39,11 @@ class Projects::MergeRequests::ApplicationController < Projects::ApplicationCont
end
def set_pipeline_variables
@pipelines = @merge_request.all_pipelines
@pipeline = @merge_request.head_pipeline
@statuses_count = @pipeline.present? ? @pipeline.statuses.relevant.count : 0
@pipelines =
if can?(current_user, :read_pipeline, @project)
@merge_request.all_pipelines
else
Ci::Pipeline.none
end
end
end

View file

@ -4,6 +4,7 @@ class Projects::PipelinesController < Projects::ApplicationController
before_action :whitelist_query_limiting, only: [:create, :retry]
before_action :pipeline, except: [:index, :new, :create, :charts]
before_action :authorize_read_pipeline!
before_action :authorize_read_build!, only: [:index]
before_action :authorize_create_pipeline!, only: [:new, :create]
before_action :authorize_update_pipeline!, only: [:retry, :cancel]

View file

@ -99,7 +99,9 @@ module Projects
def define_triggers_variables
@triggers = @project.triggers
.present(current_user: current_user)
@trigger = ::Ci::Trigger.new
.present(current_user: current_user)
end
def define_badges_variables

View file

@ -66,12 +66,11 @@ class Projects::TriggersController < Projects::ApplicationController
end
def trigger
@trigger ||= project.triggers.find(params[:id]) || render_404
@trigger ||= project.triggers.find(params[:id])
.present(current_user: current_user)
end
def trigger_params
params.require(:trigger).permit(
:description
)
params.require(:trigger).permit(:description)
end
end

View file

@ -14,6 +14,9 @@ class ContributedProjectsFinder < UnionFinder
# Returns an ActiveRecord::Relation.
# rubocop: disable CodeReuse/ActiveRecord
def execute(current_user = nil)
# Do not show contributed projects if the user profile is private.
return Project.none unless can_read_profile?(current_user)
segments = all_projects(current_user)
find_union(segments, Project).includes(:namespace).order_id_desc
@ -22,6 +25,10 @@ class ContributedProjectsFinder < UnionFinder
private
def can_read_profile?(current_user)
Ability.allowed?(current_user, :read_user_profile, @user)
end
def all_projects(current_user)
projects = []

View file

@ -1,12 +0,0 @@
# frozen_string_literal: true
module ExternalWikiHelper
def get_project_wiki_path(project)
external_wiki_service = project.external_wiki
if external_wiki_service
external_wiki_service.properties['external_wiki_url']
else
project_wiki_path(project, :home)
end
end
end

View file

@ -278,7 +278,8 @@ module ProjectsHelper
nav_tabs << :container_registry
end
if project.builds_enabled? && can?(current_user, :read_pipeline, project)
# Pipelines feature is tied to presence of builds
if can?(current_user, :read_build, project)
nav_tabs << :pipelines
end
@ -286,19 +287,24 @@ module ProjectsHelper
nav_tabs << :operations
end
if project.external_issue_tracker
nav_tabs << :external_issue_tracker
end
tab_ability_map.each do |tab, ability|
if can?(current_user, ability, project)
nav_tabs << tab
end
end
nav_tabs << external_nav_tabs(project)
nav_tabs.flatten
end
def external_nav_tabs(project)
[].tap do |tabs|
tabs << :external_issue_tracker if project.external_issue_tracker
tabs << :external_wiki if project.has_external_wiki?
end
end
def tab_ability_map
{
environments: :read_environment,

View file

@ -4,6 +4,7 @@ module Ci
class Trigger < ActiveRecord::Base
extend Gitlab::Ci::Model
include IgnorableColumn
include Presentable
ignore_column :deleted_at
@ -29,7 +30,7 @@ module Ci
end
def short_token
token[0...4]
token[0...4] if token.present?
end
def legacy?

View file

@ -11,6 +11,7 @@ class Commit
include Mentionable
include Referable
include StaticModel
include Presentable
include ::Gitlab::Utils::StrongMemoize
attr_mentionable :safe_message, pipeline: :single_line
@ -313,7 +314,9 @@ class Commit
end
def last_pipeline
@last_pipeline ||= pipelines.last
strong_memoize(:last_pipeline) do
pipelines.last
end
end
def status(ref = nil)

View file

@ -15,7 +15,7 @@ module CacheMarkdownField
# Increment this number every time the renderer changes its output
CACHE_REDCARPET_VERSION = 3
CACHE_COMMONMARK_VERSION_START = 10
CACHE_COMMONMARK_VERSION = 12
CACHE_COMMONMARK_VERSION = 13
# changes to these attributes cause the cache to be invalidated
INVALIDATED_BY = %w[author project].freeze

View file

@ -0,0 +1,22 @@
# frozen_string_literal: true
class LfsDownloadObject
include ActiveModel::Validations
attr_accessor :oid, :size, :link
delegate :sanitized_url, :credentials, to: :sanitized_uri
validates :oid, format: { with: /\A\h{64}\z/ }
validates :size, numericality: { greater_than_or_equal_to: 0 }
validates :link, public_url: { protocols: %w(http https) }
def initialize(oid:, size:, link:)
@oid = oid
@size = size
@link = link
end
def sanitized_uri
@sanitized_uri ||= Gitlab::UrlSanitizer.new(link)
end
end
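
A minimal usage sketch of the new value object (values invented for illustration; whether a given link passes the `public_url` validation also depends on the instance's URL-blocking settings):

```ruby
# Hypothetical values; the oid must be a 64-character hex digest.
lfs_object = LfsDownloadObject.new(
  oid:  'a' * 64,
  size: 1024,
  link: 'https://example.com/lfs/objects/abc'
)
lfs_object.valid?  # => true in most configurations

bad = LfsDownloadObject.new(oid: 'not-hex', size: -1, link: 'ftp://example.com/x')
bad.valid?                # => false
bad.errors.full_messages  # lists the oid, size and link violations
```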

View file

@ -7,6 +7,7 @@ class Member < ActiveRecord::Base
include Expirable
include Gitlab::Access
include Presentable
include FromUnion
attr_accessor :raw_invite_token
@ -83,6 +84,14 @@ class Member < ActiveRecord::Base
scope :order_recent_sign_in, -> { left_join_users.reorder(Gitlab::Database.nulls_last_order('users.last_sign_in_at', 'DESC')) }
scope :order_oldest_sign_in, -> { left_join_users.reorder(Gitlab::Database.nulls_last_order('users.last_sign_in_at', 'ASC')) }
scope :on_project_and_ancestors, ->(project) do
if project.group
from_union([GroupMember.where(source_id: project.group.self_and_ancestors), project.project_members])
else
project.project_members
end
end
before_validation :generate_invite_token, on: :create, if: -> (member) { member.invite_email.present? }
after_create :send_invite, if: :invite?, unless: :importing?

View file

@ -492,6 +492,7 @@ class Project < ActiveRecord::Base
def reference_pattern
%r{
(?<!#{Gitlab::PathRegex::PATH_START_CHAR})
((?<namespace>#{Gitlab::PathRegex::FULL_NAMESPACE_FORMAT_REGEX})\/)?
(?<project>#{Gitlab::PathRegex::PROJECT_PATH_FORMAT_REGEX})
}x
@ -531,6 +532,14 @@ class Project < ActiveRecord::Base
end
end
def pipelines
if builds_enabled?
super
else
super.external
end
end
# returns all ancestor-groups up to but excluding the given namespace
# when no namespace is given, all ancestors up to the top are returned
def ancestors_upto(top = nil)

View file

@ -74,6 +74,14 @@ class ProjectTeam
end
alias_method :users, :members
# The `members` method uses the project_authorizations table, which
# is updated asynchronously. On project move it may still contain
# old members who no longer have access to the new location,
# so we narrow the result to members of the project or the project's group.
def members_in_project_and_ancestors
members.where(id: member_user_ids)
end
def guests
@guests ||= fetch_members(Gitlab::Access::GUEST)
end
@ -191,4 +199,8 @@ class ProjectTeam
def group
project.group
end
def member_user_ids
Member.on_project_and_ancestors(project).select(:user_id)
end
end

View file

@ -10,6 +10,15 @@ module Ci
@subject.project.branch_allows_collaboration?(@user, @subject.ref)
end
condition(:external_pipeline, scope: :subject, score: 0) do
@subject.external?
end
# Disallow users without permissions from accessing internal pipelines
rule { ~can?(:read_build) & ~external_pipeline }.policy do
prevent :read_pipeline
end
rule { protected_ref }.prevent :update_pipeline
rule { can?(:public_access) & branch_allows_collaboration }.policy do

View file

@ -18,6 +18,7 @@ class IssuePolicy < IssuablePolicy
prevent :read_issue_iid
prevent :update_issue
prevent :admin_issue
prevent :create_note
end
rule { locked }.policy do

View file

@ -18,6 +18,7 @@ class NotePolicy < BasePolicy
prevent :read_note
prevent :admin_note
prevent :resolve_note
prevent :award_emoji
end
rule { is_author }.policy do

View file

@ -28,5 +28,8 @@ class PersonalSnippetPolicy < BasePolicy
rule { anonymous }.prevent :comment_personal_snippet
rule { can?(:comment_personal_snippet) }.enable :award_emoji
rule { can?(:comment_personal_snippet) }.policy do
enable :create_note
enable :award_emoji
end
end

View file

@ -103,6 +103,10 @@ class ProjectPolicy < BasePolicy
@subject.feature_available?(:merge_requests, @user)
end
condition(:internal_builds_disabled) do
!@subject.builds_enabled?
end
features = %w[
merge_requests
issues
@ -190,7 +194,6 @@ class ProjectPolicy < BasePolicy
enable :read_build
enable :read_container_image
enable :read_pipeline
enable :read_pipeline_schedule
enable :read_environment
enable :read_deployment
enable :read_merge_request
@ -226,6 +229,7 @@ class ProjectPolicy < BasePolicy
enable :update_build
enable :create_pipeline
enable :update_pipeline
enable :read_pipeline_schedule
enable :create_pipeline_schedule
enable :create_merge_request_from
enable :create_wiki
@ -299,13 +303,12 @@ class ProjectPolicy < BasePolicy
prevent(*create_read_update_admin_destroy(:project_snippet))
end
rule { wiki_disabled & ~has_external_wiki }.policy do
rule { wiki_disabled }.policy do
prevent(*create_read_update_admin_destroy(:wiki))
prevent(:download_wiki_code)
end
rule { builds_disabled | repository_disabled }.policy do
prevent(*create_update_admin_destroy(:pipeline))
prevent(*create_read_update_admin_destroy(:build))
prevent(*create_read_update_admin_destroy(:pipeline_schedule))
prevent(*create_read_update_admin_destroy(:environment))
@ -313,11 +316,22 @@ class ProjectPolicy < BasePolicy
prevent(*create_read_update_admin_destroy(:deployment))
end
# There are two separate cases when builds_disabled is true:
# 1. When internal CI is disabled - builds_disabled && internal_builds_disabled
# - We do not prevent the user from accessing Pipelines, so they can still reach external CI
# 2. When the user is not allowed to access CI - builds_disabled && ~internal_builds_disabled
# - We prevent the user from accessing Pipelines
rule { (builds_disabled & ~internal_builds_disabled) | repository_disabled }.policy do
prevent(*create_read_update_admin_destroy(:pipeline))
prevent(*create_read_update_admin_destroy(:commit_status))
end
rule { repository_disabled }.policy do
prevent :push_code
prevent :download_code
prevent :fork_project
prevent :read_commit_status
prevent :read_pipeline
end
rule { container_registry_disabled }.policy do
@ -343,7 +357,6 @@ class ProjectPolicy < BasePolicy
enable :read_merge_request
enable :read_note
enable :read_pipeline
enable :read_pipeline_schedule
enable :read_commit_status
enable :read_container_image
enable :download_code
@ -361,7 +374,6 @@ class ProjectPolicy < BasePolicy
rule { public_builds & can?(:guest_access) }.policy do
enable :read_pipeline
enable :read_pipeline_schedule
end
# These rules are included to allow maintainers of projects to push to certain
@ -376,7 +388,7 @@ class ProjectPolicy < BasePolicy
end.enable :read_issue_iid
rule do
(can?(:read_project_for_iids) & merge_requests_visible_to_user) | can?(:read_merge_request)
(~guest & can?(:read_project_for_iids) & merge_requests_visible_to_user) | can?(:read_merge_request)
end.enable :read_merge_request_iid
private

View file

@ -44,4 +44,6 @@ class ProjectSnippetPolicy < BasePolicy
enable :update_project_snippet
enable :admin_project_snippet
end
rule { ~can?(:read_project_snippet) }.prevent :create_note
end

View file

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Ci
class TriggerPresenter < Gitlab::View::Presenter::Delegated
presents :trigger
def has_token_exposed?
can?(current_user, :admin_trigger, trigger)
end
def token
if has_token_exposed?
trigger.token
else
trigger.short_token
end
end
end
end

View file

@ -0,0 +1,13 @@
# frozen_string_literal: true
class CommitPresenter < Gitlab::View::Presenter::Simple
presents :commit
def status_for(ref)
can?(current_user, :read_commit_status, commit.project) && commit.status(ref)
end
def any_pipelines?
can?(current_user, :read_pipeline, commit.project) && commit.pipelines.any?
end
end

View file

@ -170,6 +170,10 @@ class MergeRequestPresenter < Gitlab::View::Presenter::Delegated
source_branch_exists? && merge_request.can_remove_source_branch?(current_user)
end
def can_read_pipeline?
pipeline && can?(current_user, :read_pipeline, pipeline)
end
def mergeable_discussions_state
# This avoids calling MergeRequest#mergeable_discussions_state without
# considering the state of the MR first. If a MR isn't mergeable, we can

View file

@ -54,7 +54,7 @@ class MergeRequestWidgetEntity < IssuableEntity
end
expose :merge_commit_message
expose :actual_head_pipeline, with: PipelineDetailsEntity, as: :pipeline
expose :actual_head_pipeline, with: PipelineDetailsEntity, as: :pipeline, if: -> (mr, _) { presenter(mr).can_read_pipeline? }
expose :merge_pipeline, with: PipelineDetailsEntity, if: ->(mr, _) { mr.merged? && can?(request.current_user, :read_pipeline, mr.target_project)}
# Booleans

View file

@ -9,7 +9,7 @@ module Notes
if in_reply_to_discussion_id.present?
discussion = find_discussion(in_reply_to_discussion_id)
unless discussion
unless discussion && can?(current_user, :create_note, discussion.noteable)
note = Note.new
note.errors.add(:base, 'Discussion to reply to cannot be found')
return note
@ -34,19 +34,8 @@ module Notes
if project
project.notes.find_discussion(discussion_id)
else
discussion = Note.find_discussion(discussion_id)
noteable = discussion.noteable
return nil unless noteable_without_project?(noteable)
discussion
Note.find_discussion(discussion_id)
end
end
def noteable_without_project?(noteable)
return true if noteable.is_a?(PersonalSnippet) && can?(current_user, :comment_personal_snippet, noteable)
false
end
end
end

View file

@ -373,7 +373,8 @@ class NotificationService
end
def project_was_moved(project, old_path_with_namespace)
recipients = notifiable_users(project.team.members, :mention, project: project)
recipients = project.private? ? project.team.members_in_project_and_ancestors : project.team.members
recipients = notifiable_users(recipients, :mention, project: project)
recipients.each do |recipient|
mailer.project_was_moved_email(

View file

@ -0,0 +1,14 @@
# frozen_string_literal: true
module Projects
# Used by project imports, it removes any potential paths
# included in an error message that could be stored in the DB
class ImportErrorFilter
ERROR_MESSAGE_FILTER = /[^\s]*#{File::SEPARATOR}[^\s]*(?=(\s|\z))/
FILTER_MESSAGE = '[FILTERED]'
def self.filter_message(message)
message.gsub(ERROR_MESSAGE_FILTER, FILTER_MESSAGE)
end
end
end
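
A quick sketch of the filter in action (the path is invented): any whitespace-delimited token containing a path separator is replaced before the message is stored.

```ruby
message = "No such file or directory - /var/opt/gitlab/git-data/repositories/group/project.git"
Projects::ImportErrorFilter.filter_message(message)
# => "No such file or directory - [FILTERED]"
```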

View file

@ -24,8 +24,12 @@ module Projects
import_data
success
rescue => e
rescue Gitlab::UrlBlocker::BlockedUrlError => e
error("Error importing repository #{project.safe_import_url} into #{project.full_path} - #{e.message}")
rescue => e
message = Projects::ImportErrorFilter.filter_message(e.message)
error("Error importing repository #{project.safe_import_url} into #{project.full_path} - #{message}")
end
private
@ -35,7 +39,7 @@ module Projects
begin
Gitlab::UrlBlocker.validate!(project.import_url, ports: Project::VALID_IMPORT_PORTS)
rescue Gitlab::UrlBlocker::BlockedUrlError => e
raise Error, "Blocked import URL: #{e.message}"
raise e, "Blocked import URL: #{e.message}"
end
end
@ -86,11 +90,11 @@ module Projects
return unless project.lfs_enabled?
oids_to_download = Projects::LfsPointers::LfsImportService.new(project).execute
download_service = Projects::LfsPointers::LfsDownloadService.new(project)
lfs_objects_to_download = Projects::LfsPointers::LfsImportService.new(project).execute
oids_to_download.each do |oid, link|
download_service.execute(oid, link)
lfs_objects_to_download.each do |lfs_download_object|
Projects::LfsPointers::LfsDownloadService.new(project, lfs_download_object)
.execute
end
rescue => e
# Right now, to avoid aborting the importing process, we silently fail

View file

@ -41,16 +41,17 @@ module Projects
end
def parse_response_links(objects_response)
objects_response.each_with_object({}) do |entry, link_list|
objects_response.each_with_object([]) do |entry, link_list|
begin
oid = entry['oid']
link = entry.dig('actions', DOWNLOAD_ACTION, 'href')
raise DownloadLinkNotFound unless link
link_list[oid] = add_credentials(link)
rescue DownloadLinkNotFound, URI::InvalidURIError
Rails.logger.error("Link for Lfs Object with oid #{oid} not found or invalid.")
link_list << LfsDownloadObject.new(oid: entry['oid'],
size: entry['size'],
link: add_credentials(link))
rescue DownloadLinkNotFound, Addressable::URI::InvalidURIError
log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.")
end
end
end
@ -70,7 +71,7 @@ module Projects
end
def add_credentials(link)
uri = URI.parse(link)
uri = Addressable::URI.parse(link)
if should_add_credentials?(uri)
uri.user = remote_uri.user

View file

@ -4,68 +4,93 @@
module Projects
module LfsPointers
class LfsDownloadService < BaseService
VALID_PROTOCOLS = %w[http https].freeze
SizeError = Class.new(StandardError)
OidError = Class.new(StandardError)
attr_reader :lfs_download_object
delegate :oid, :size, :credentials, :sanitized_url, to: :lfs_download_object, prefix: :lfs
def initialize(project, lfs_download_object)
super(project)
@lfs_download_object = lfs_download_object
end
# rubocop: disable CodeReuse/ActiveRecord
def execute(oid, url)
return unless project&.lfs_enabled? && oid.present? && url.present?
def execute
return unless project&.lfs_enabled? && lfs_download_object
return error("LFS file with oid #{lfs_oid} has invalid attributes") unless lfs_download_object.valid?
return if LfsObject.exists?(oid: lfs_oid)
return if LfsObject.exists?(oid: oid)
sanitized_uri = sanitize_url!(url)
with_tmp_file(oid) do |file|
download_and_save_file(file, sanitized_uri)
lfs_object = LfsObject.new(oid: oid, size: file.size, file: file)
project.all_lfs_objects << lfs_object
wrap_download_errors do
download_lfs_file!
end
rescue Gitlab::UrlBlocker::BlockedUrlError => e
Rails.logger.error("LFS file with oid #{oid} couldn't be downloaded: #{e.message}")
rescue StandardError => e
Rails.logger.error("LFS file with oid #{oid} couldn't be downloaded from #{sanitized_uri.sanitized_url}: #{e.message}")
end
# rubocop: enable CodeReuse/ActiveRecord
private
def sanitize_url!(url)
Gitlab::UrlSanitizer.new(url).tap do |sanitized_uri|
# Just validate that HTTP/HTTPS protocols are used. The
# subsequent Gitlab::HTTP.get call will do network checks
# based on the settings.
Gitlab::UrlBlocker.validate!(sanitized_uri.sanitized_url,
protocols: VALID_PROTOCOLS)
def wrap_download_errors(&block)
yield
rescue SizeError, OidError, StandardError => e
error("LFS file with oid #{lfs_oid} could't be downloaded from #{lfs_sanitized_url}: #{e.message}")
end
def download_lfs_file!
with_tmp_file do |tmp_file|
download_and_save_file!(tmp_file)
project.all_lfs_objects << LfsObject.new(oid: lfs_oid,
size: lfs_size,
file: tmp_file)
success
end
end
def download_and_save_file(file, sanitized_uri)
response = Gitlab::HTTP.get(sanitized_uri.sanitized_url, headers(sanitized_uri)) do |fragment|
def download_and_save_file!(file)
digester = Digest::SHA256.new
response = Gitlab::HTTP.get(lfs_sanitized_url, download_headers) do |fragment|
digester << fragment
file.write(fragment)
raise_size_error! if file.size > lfs_size
end
raise StandardError, "Received error code #{response.code}" unless response.success?
raise_size_error! if file.size != lfs_size
raise_oid_error! if digester.hexdigest != lfs_oid
end
def headers(sanitized_uri)
query_options.tap do |headers|
credentials = sanitized_uri.credentials
if credentials[:user].present? || credentials[:password].present?
def download_headers
{ stream_body: true }.tap do |headers|
if lfs_credentials[:user].present? || lfs_credentials[:password].present?
# Using authentication headers in the request
headers[:http_basic_authentication] = [credentials[:user], credentials[:password]]
headers[:basic_auth] = { username: lfs_credentials[:user], password: lfs_credentials[:password] }
end
end
end
def query_options
{ stream_body: true }
end
def with_tmp_file(oid)
def with_tmp_file
create_tmp_storage_dir
File.open(File.join(tmp_storage_dir, oid), 'wb') { |file| yield file }
File.open(tmp_filename, 'wb') do |file|
begin
yield file
rescue StandardError => e
# If the LFS file is successfully downloaded it will be removed
# when it is added to the project's LFS files.
# Nevertheless, if any exception is raised the file would remain
# in the file system. Here we make sure to remove it.
File.unlink(file) if File.exist?(file)
raise e
end
end
end
def tmp_filename
File.join(tmp_storage_dir, lfs_oid)
end
def create_tmp_storage_dir
@ -79,6 +104,20 @@ module Projects
def storage_dir
@storage_dir ||= Gitlab.config.lfs.storage_path
end
def raise_size_error!
raise SizeError, 'Size mismatch'
end
def raise_oid_error!
raise OidError, 'Oid mismatch'
end
def error(message, http_status = nil)
log_error(message)
super
end
end
end
end

View file

@ -7,7 +7,11 @@ module Projects
BLOCK_SIZE = 32.kilobytes
MAX_SIZE = 1.terabyte
SITE_PATH = 'public/'.freeze
PUBLIC_DIR = 'public'.freeze
# this has to be an invalid group name,
# as it shares the namespace with groups
TMP_EXTRACT_PATH = '@pages.tmp'.freeze
attr_reader :build
@ -27,12 +31,11 @@ module Projects
raise InvalidStateError, 'pages are outdated' unless latest?
# Create temporary directory in which we will extract the artifacts
FileUtils.mkdir_p(tmp_path)
Dir.mktmpdir(nil, tmp_path) do |archive_path|
make_secure_tmp_dir(tmp_path) do |archive_path|
extract_archive!(archive_path)
# Check if we did extract public directory
archive_public_path = File.join(archive_path, 'public')
archive_public_path = File.join(archive_path, PUBLIC_DIR)
raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
raise InvalidStateError, 'pages are outdated' unless latest?
@ -85,22 +88,18 @@ module Projects
raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
# Calculate page size after extract
public_entry = build.artifacts_metadata_entry(SITE_PATH, recursive: true)
public_entry = build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true)
if public_entry.total_size > max_size
raise InvalidStateError, "artifacts for pages are too large: #{public_entry.total_size}"
end
# Requires UnZip at least 6.00 Info-ZIP.
# -qq be (very) quiet
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
build.artifacts_file.use_file do |artifacts_path|
unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
raise FailedToExtractError, 'pages failed to extract'
end
SafeZip::Extract.new(artifacts_path)
.extract(directories: [PUBLIC_DIR], to: temp_path)
end
rescue SafeZip::Extract::Error => e
raise FailedToExtractError, e.message
end
def deploy_page!(archive_public_path)
@ -139,7 +138,7 @@ module Projects
end
def tmp_path
@tmp_path ||= File.join(::Settings.pages.path, 'tmp')
@tmp_path ||= File.join(::Settings.pages.path, TMP_EXTRACT_PATH)
end
def pages_path
@ -147,11 +146,11 @@ module Projects
end
def public_path
@public_path ||= File.join(pages_path, 'public')
@public_path ||= File.join(pages_path, PUBLIC_DIR)
end
def previous_public_path
@previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
@previous_public_path ||= File.join(pages_path, "#{PUBLIC_DIR}.#{SecureRandom.hex}")
end
def ref
@ -188,5 +187,15 @@ module Projects
def pages_deployments_failed_total_counter
@pages_deployments_failed_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_failed_total, "Counter of GitLab Pages deployments which failed")
end
def make_secure_tmp_dir(tmp_path)
FileUtils.mkdir_p(tmp_path)
path = Dir.mktmpdir(nil, tmp_path)
begin
yield(path)
ensure
FileUtils.remove_entry_secure(path)
end
end
end
end

View file

@ -264,19 +264,34 @@
%strong.fly-out-top-item-name
= _('Registry')
- if project_nav_tab? :wiki
- if project_nav_tab?(:wiki)
- wiki_url = project_wiki_path(@project, :home)
= nav_link(controller: :wikis) do
= link_to get_project_wiki_path(@project), class: 'shortcuts-wiki' do
= link_to wiki_url, class: 'shortcuts-wiki' do
.nav-icon-container
= sprite_icon('book')
%span.nav-item-name
= _('Wiki')
%ul.sidebar-sub-level-items.is-fly-out-only
= nav_link(controller: :wikis, html_options: { class: "fly-out-top-item" } ) do
= link_to get_project_wiki_path(@project) do
= link_to wiki_url do
%strong.fly-out-top-item-name
= _('Wiki')
- if project_nav_tab?(:external_wiki)
- external_wiki_url = @project.external_wiki.external_wiki_url
= nav_link do
= link_to external_wiki_url, class: 'shortcuts-external_wiki' do
.nav-icon-container
= sprite_icon('issue-external')
%span.nav-item-name
= _('External Wiki')
%ul.sidebar-sub-level-items.is-fly-out-only
= nav_link(html_options: { class: "fly-out-top-item" } ) do
= link_to external_wiki_url do
%strong.fly-out-top-item-name
= _('External Wiki')
- if project_nav_tab? :snippets
= nav_link(controller: :snippets) do
= link_to project_snippets_path(@project), class: 'shortcuts-snippets' do

View file

@ -1,4 +1,4 @@
= icon('info-circle fw')
= succeed '.' do
To learn more about this project, read
= link_to "the wiki", get_project_wiki_path(viewer.project)
= link_to "the wiki", project_wiki_path(viewer.project, :home)

View file

@ -1,9 +1,11 @@
- any_pipelines = @commit.present(current_user: current_user).any_pipelines?
%ul.nav-links.no-top.no-bottom.commit-ci-menu.nav.nav-tabs
= nav_link(path: 'commit#show') do
= link_to project_commit_path(@project, @commit.id) do
Changes
%span.badge.badge-pill= @diffs.size
- if can?(current_user, :read_pipeline, @project)
- if any_pipelines
= nav_link(path: 'commit#pipelines') do
= link_to pipelines_project_commit_path(@project, @commit.id) do
Pipelines

View file

@ -74,8 +74,8 @@
%span.commit-info.merge-requests{ 'data-project-commit-path' => merge_requests_project_commit_path(@project, @commit.id, format: :json) }
= icon('spinner spin')
- if @commit.last_pipeline
- last_pipeline = @commit.last_pipeline
- if can?(current_user, :read_pipeline, last_pipeline)
.well-segment.pipeline-info
.status-icon-container
= link_to project_pipeline_path(@project, last_pipeline.id), class: "ci-status-icon-#{last_pipeline.status}" do

View file

@ -9,11 +9,8 @@
.container-fluid{ class: [limited_container_width, container_class] }
= render "commit_box"
- if @commit.status
= render "ci_menu"
- else
.block-connector
= render "projects/diffs/diffs", diffs: @diffs, environment: @environment
= render "projects/diffs/diffs", diffs: @diffs, environment: @environment, is_commit: true
.limited-width-notes
= render "shared/notes/notes_with_form", :autocomplete => true

View file

@ -6,6 +6,7 @@
- merge_request = local_assigns.fetch(:merge_request, nil)
- project = local_assigns.fetch(:project) { merge_request&.project }
- ref = local_assigns.fetch(:ref) { merge_request&.source_branch }
- commit_status = commit.present(current_user: current_user).status_for(ref)
- link = commit_path(project, commit, merge_request: merge_request)
%li.commit.flex-row.js-toggle-container{ id: "commit-#{commit.short_id}" }
@ -22,7 +23,7 @@
%span.commit-row-message.d-block.d-sm-none
&middot;
= commit.short_id
- if commit.status(ref)
- if commit_status
.d-block.d-sm-none
= render_commit_status(commit, ref: ref)
- if commit.description?
@ -45,7 +46,7 @@
- else
= render partial: 'projects/commit/ajax_signature', locals: { commit: commit }
- if commit.status(ref)
- if commit_status
= render_commit_status(commit, ref: ref)
.js-commit-pipeline-status{ data: { endpoint: pipelines_project_commit_path(project, commit.id, ref: ref) } }

View file

@ -4,9 +4,10 @@
%ul.unstyled-list.related-merge-requests
- has_any_head_pipeline = @merge_requests.any?(&:head_pipeline_id)
- @merge_requests.each do |merge_request|
- merge_request = merge_request.present(current_user: current_user)
%li
%span.merge-request-ci-status
- if merge_request.head_pipeline
- if merge_request.can_read_pipeline?
= render_pipeline_status(merge_request.head_pipeline)
- elsif has_any_head_pipeline
= icon('blank fw')

View file

@ -6,7 +6,7 @@
%li
- target = @project.repository.find_branch(branch).dereferenced_target
- pipeline = @project.pipeline_for(branch, target.sha) if target
- if pipeline
- if can?(current_user, :read_pipeline, pipeline)
%span.related-branch-ci-status
= render_pipeline_status(pipeline)
%span.related-branch-info

View file

@ -46,7 +46,7 @@
%li.issuable-status.d-none.d-sm-inline-block
= icon('ban')
CLOSED
- if merge_request.head_pipeline
- if can?(current_user, :read_pipeline, merge_request.head_pipeline)
%li.issuable-pipeline-status.d-none.d-sm-inline-block
= render_pipeline_status(merge_request.head_pipeline)
- if merge_request.open? && merge_request.broken?

View file

@ -6,7 +6,6 @@
= preserve(markdown(commit.description, pipeline: :single_line))
.info-well
- if commit.status
.well-segment.pipeline-info
.icon-container
= icon('clock-o')

View file

@ -1,6 +1,6 @@
%tr
%td
- if can?(current_user, :admin_trigger, trigger)
- if trigger.has_token_exposed?
%span= trigger.token
= clipboard_button(text: trigger.token, title: "Copy trigger token to clipboard")
- else

View file

@ -1,5 +1,5 @@
- @no_container = true
- add_to_breadcrumbs "Wiki", get_project_wiki_path(@project)
- add_to_breadcrumbs "Wiki", project_wiki_path(@project, :home)
- breadcrumb_title s_("Wiki|Pages")
- page_title s_("Wiki|Pages"), _("Wiki")

View file

@ -2,7 +2,7 @@
- breadcrumb_title @page.title.capitalize
- wiki_breadcrumb_dropdown_links(@page.slug)
- page_title @page.title.capitalize, _("Wiki")
- add_to_breadcrumbs _("Wiki"), get_project_wiki_path(@project)
- add_to_breadcrumbs _("Wiki"), project_wiki_path(@project, :home)
.wiki-page-header.has-sidebar-toggle
%button.btn.btn-default.sidebar-toggle.js-sidebar-wiki-toggle{ role: "button", type: "button" }

View file

@ -45,7 +45,7 @@
.prepend-top-0
- if project.archived
%span.prepend-left-10.badge.badge-warning archived
- if can?(current_user, :read_cross_project) && project.pipeline_status.has_status?
- if can?(current_user, :read_cross_project) && project.pipeline_status.has_status? && can?(current_user, :read_build, project)
%span.prepend-left-10
= render_project_pipeline_status(project.pipeline_status)
- if forks

View file

@ -1,3 +1,12 @@
# Alias import callbacks under the /users/auth endpoint so that
# the OAuth2 callback URL can be restricted under http://example.com/users/auth
# instead of http://example.com.
Devise.omniauth_providers.each do |provider|
next if provider == 'ldapmain'
get "/users/auth/-/import/#{provider}/callback", to: "import/#{provider}#callback", as: "users_import_#{provider}_callback"
end
namespace :import do
resource :github, only: [:create, :new], controller: :github do
post :personal_access_token
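
The alias above makes each OAuth callback resolve under `/users/auth`; the controller changes in this commit use the generated helpers, e.g. (host shown is illustrative):

```ruby
users_import_github_callback_url
# => "https://gitlab.example.com/users/auth/-/import/github/callback"
users_import_bitbucket_callback_url
# => "https://gitlab.example.com/users/auth/-/import/bitbucket/callback"
```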

View file

@ -0,0 +1,60 @@
# frozen_string_literal: true
class UpdateProjectImportVisibilityLevel < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 100
PRIVATE = 0
INTERNAL = 10
disable_ddl_transaction!
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
end
class Project < ActiveRecord::Base
include EachBatch
belongs_to :namespace
IMPORT_TYPE = 'gitlab_project'
scope :with_group_visibility, ->(visibility) do
joins(:namespace)
.where(namespaces: { type: 'Group', visibility_level: visibility })
.where(import_type: IMPORT_TYPE)
.where('projects.visibility_level > namespaces.visibility_level')
end
self.table_name = 'projects'
end
def up
# Update project's visibility to be the same as the group
# if it is more restrictive than `PUBLIC`.
update_projects_visibility(PRIVATE)
update_projects_visibility(INTERNAL)
end
def down
# no-op: unrecoverable data migration
end
private
def update_projects_visibility(visibility)
say_with_time("Updating project visibility to #{visibility} on #{Project::IMPORT_TYPE} imports.") do
Project.with_group_visibility(visibility).select(:id).each_batch(of: BATCH_SIZE) do |batch, _index|
batch_sql = Gitlab::Database.mysql? ? batch.pluck(:id).join(', ') : batch.select(:id).to_sql
say("Updating #{batch.size} items.", true)
execute("UPDATE projects SET visibility_level = '#{visibility}' WHERE id IN (#{batch_sql})")
end
end
end
end

View file

@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20181123042307) do
ActiveRecord::Schema.define(version: 20181219130552) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"

View file

@ -5,6 +5,13 @@ description: "Set and configure Git protocol v2"
# Configuring Git Protocol v2
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/46555) in GitLab 11.4.
> [Temporarily disabled](https://gitlab.com/gitlab-org/gitlab-ce/issues/55769) in GitLab 11.5.8, 11.6.6, 11.7.1, and 11.8+
NOTE: **Note:**
Git protocol v2 support has been [temporarily disabled](https://gitlab.com/gitlab-org/gitlab-ce/issues/55769),
as a feature used to hide certain internal references does not function when it
is enabled, and this has a security impact. Once this problem has been resolved,
protocol v2 support will be re-enabled.
---

View file

@ -130,6 +130,25 @@ Gitaly. To use a custom Gitaly version in CI you need to update
GITALY_SERVER_VERSION. You can use the format `=revision` to use a
non-tagged commit from https://gitlab.com/gitlab-org/gitaly in CI.
To use a different Gitaly repository, e.g., if your changes are present
on a fork, you can specify a `GITALY_REPO_URL` environment variable when
running tests:
```shell
GITALY_REPO_URL=https://gitlab.com/nick.thomas/gitaly bundle exec rspec spec/lib/gitlab/git/repository_spec.rb
```
If your fork of Gitaly is private, you can generate a [Deploy Token](../user/project/deploy_tokens/index.md)
and specify it in the URL:
```shell
GITALY_REPO_URL=https://gitlab+deploy-token-1000:token-here@gitlab.com/nick.thomas/gitaly bundle exec rspec spec/lib/gitlab/git/repository_spec.rb
```
To use a custom Gitaly repository in CI, for instance if you want your
GitLab fork to always use your own Gitaly fork, set `GITALY_REPO_URL`
as a [CI environment variable](../ci/variables/README.md#variables).
---
[Return to Development documentation](README.md)

View file

@ -43,9 +43,13 @@ you to use.
| :--- | :---------- |
| **Name** | This can be anything. Consider something like `<Organization>'s GitLab` or `<Your Name>'s GitLab` or something else descriptive. |
| **Application description** | Fill this in if you wish. |
| **Callback URL** | The URL to your GitLab installation, e.g., `https://gitlab.example.com`. |
| **Callback URL** | The URL to your GitLab installation, e.g., `https://gitlab.example.com/users/auth`. |
| **URL** | The URL to your GitLab installation, e.g., `https://gitlab.example.com`. |
NOTE: Be sure to append `/users/auth` to the end of the callback URL
to prevent an [OAuth2 covert
redirect](http://tetraph.com/covert_redirect/) vulnerability.
NOTE: Starting in GitLab 8.15, you MUST specify a callback URL, or you will
see an "Invalid redirect_uri" message. For more details, see [the
Bitbucket documentation](https://confluence.atlassian.com/bitbucket/oauth-faq-338365710.html).

View file

@ -19,10 +19,15 @@ GitHub will generate an application ID and secret key for you to use.
- Application name: This can be anything. Consider something like `<Organization>'s GitLab` or `<Your Name>'s GitLab` or something else descriptive.
- Homepage URL: The URL to your GitLab installation. 'https://gitlab.company.com'
- Application description: Fill this in if you wish.
- Authorization callback URL is 'http(s)://${YOUR_DOMAIN}'. Please make sure the port is included if your GitLab instance is not configured on default port.
1. Select "Register application".
- Authorization callback URL: `http(s)://${YOUR_DOMAIN}/users/auth`. Please make sure the port is included if your GitLab instance is not configured on default port.
1. You should now see a Client ID and Client Secret near the top right of the page (see screenshot).
NOTE: Be sure to append `/users/auth` to the end of the callback URL
to prevent an [OAuth2 covert
redirect](http://tetraph.com/covert_redirect/) vulnerability.
1. Select **Register application**.
1. You should now see a pair of **Client ID** and **Client Secret** near the top right of the page (see screenshot).
Keep this page open as you continue configuration.
![GitHub app](img/github_app.png)

View file

@ -1175,8 +1175,11 @@ module API
end
class Trigger < Grape::Entity
include ::API::Helpers::Presentable
expose :id
expose :token, :description
expose :token
expose :description
expose :created_at, :updated_at, :last_used
expose :owner, using: Entities::UserBasic
end

View file

@ -0,0 +1,29 @@
# frozen_string_literal: true
module API
module Helpers
##
# This module makes it possible to use `app/presenters` with
# Grape Entities. It instantiates a model presenter and passes
# options defined in the API endpoint to the presenter itself.
#
# present object, with: Entities::Something,
# current_user: current_user,
# another_option: 'my options'
#
# Example above will make `current_user` and `another_option`
# values available in the subclass of `Gitlab::View::Presenter`
# through a separate method in the presenter.
#
# The model class needs to have `::Presentable` module mixed in
# if you want to use `API::Helpers::Presentable`.
#
module Presentable
extend ActiveSupport::Concern
def initialize(object, options = {})
super(object.present(options), options)
end
end
end
end
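
Sketch of how the pieces in this commit fit together: the endpoint passes `current_user`, this helper wraps the model in its presenter (the model must include `Presentable`), and the entity's `expose` calls are then answered by the presenter, e.g. `Ci::TriggerPresenter#token`:

```ruby
# As in the triggers API changes elsewhere in this commit:
present trigger, with: Entities::Trigger, current_user: current_user
# For a user without :admin_trigger, `token` is answered by
# Ci::TriggerPresenter#token and returns only trigger.short_token.
```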

View file

@ -76,7 +76,7 @@ module API
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
end
get ':id/pipelines/:pipeline_id' do
authorize! :read_pipeline, user_project
authorize! :read_pipeline, pipeline
present pipeline, with: Entities::Pipeline
end
@ -89,7 +89,7 @@ module API
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
end
post ':id/pipelines/:pipeline_id/retry' do
authorize! :update_pipeline, user_project
authorize! :update_pipeline, pipeline
pipeline.retry_failed(current_user)
@ -104,7 +104,7 @@ module API
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
end
post ':id/pipelines/:pipeline_id/cancel' do
authorize! :update_pipeline, user_project
authorize! :update_pipeline, pipeline
pipeline.cancel_running

View file

@ -51,7 +51,7 @@ module API
triggers = user_project.triggers.includes(:trigger_requests)
present paginate(triggers), with: Entities::Trigger
present paginate(triggers), with: Entities::Trigger, current_user: current_user
end
# rubocop: enable CodeReuse/ActiveRecord
@ -68,7 +68,7 @@ module API
trigger = user_project.triggers.find(params.delete(:trigger_id))
break not_found!('Trigger') unless trigger
present trigger, with: Entities::Trigger
present trigger, with: Entities::Trigger, current_user: current_user
end
desc 'Create a trigger' do
@ -85,7 +85,7 @@ module API
declared_params(include_missing: false).merge(owner: current_user))
if trigger.valid?
present trigger, with: Entities::Trigger
present trigger, with: Entities::Trigger, current_user: current_user
else
render_validation_error!(trigger)
end
@ -106,7 +106,7 @@ module API
break not_found!('Trigger') unless trigger
if trigger.update(declared_params(include_missing: false))
present trigger, with: Entities::Trigger
present trigger, with: Entities::Trigger, current_user: current_user
else
render_validation_error!(trigger)
end
@ -127,7 +127,7 @@ module API
if trigger.update(owner: current_user)
status :ok
present trigger, with: Entities::Trigger
present trigger, with: Entities::Trigger, current_user: current_user
else
render_validation_error!(trigger)
end

View file

@ -8,6 +8,10 @@ module Banzai
#
# Based on HTML::Pipeline::AutolinkFilter
#
# Note that our CommonMark parser, `commonmarker` (using the autolink extension)
# handles standard autolinking, like http/https. We detect additional
# schemes (smb, rdar, etc).
#
# Context options:
# :autolink - Boolean, skips all processing done by this filter when false
# :link_attr - Hash of attributes for the generated links
@ -107,10 +111,13 @@ module Banzai
end
end
# match has come from node.to_html above, so we know it's encoded
# correctly.
# Since this came from a Text node, make sure the new href is encoded.
# `commonmarker` percent encodes the domains of links it handles, so
# do the same (instead of using `normalized_encode`).
href_safe = Addressable::URI.encode(match).html_safe
html_safe_match = match.html_safe
options = link_options.merge(href: html_safe_match)
options = link_options.merge(href: href_safe)
content_tag(:a, html_safe_match, options) + dropped
end

View file

@ -5,16 +5,28 @@ module Banzai
# HTML Filter to modify the attributes of external links
class ExternalLinkFilter < HTML::Pipeline::Filter
SCHEMES = ['http', 'https', nil].freeze
RTLO = "\u202E".freeze
ENCODED_RTLO = '%E2%80%AE'.freeze
def call
links.each do |node|
uri = uri(node['href'].to_s)
# URI.parse does stricter checking on the url than Addressable,
# such as on `mailto:` links. Since we've been using it, do an
# initial parse for validity and then use Addressable
# for IDN support, etc
uri = uri_strict(node['href'].to_s)
if uri
node.set_attribute('href', uri.to_s)
addressable_uri = addressable_uri(node['href'])
else
addressable_uri = nil
end
node.set_attribute('href', uri.to_s) if uri
if SCHEMES.include?(uri&.scheme) && !internal_url?(uri)
node.set_attribute('rel', 'nofollow noreferrer noopener')
node.set_attribute('target', '_blank')
unless internal_url?(addressable_uri)
punycode_autolink_node!(addressable_uri, node)
sanitize_link_text!(node)
add_malicious_tooltip!(addressable_uri, node)
add_nofollow!(addressable_uri, node)
end
end
@ -23,12 +35,18 @@ module Banzai
private
def uri(href)
def uri_strict(href)
URI.parse(href)
rescue URI::Error
nil
end
def addressable_uri(href)
Addressable::URI.parse(href)
rescue Addressable::URI::InvalidURIError
nil
end
def links
query = 'descendant-or-self::a[@href and not(@href = "")]'
doc.xpath(query)
@ -45,6 +63,57 @@ module Banzai
def internal_url
@internal_url ||= URI.parse(Gitlab.config.gitlab.url)
end
# Only replace an autolink with an IDN with its punycode
# version if we need emailable links. Otherwise let it
# be shown normally and the tooltips will show the
# punycode version.
def punycode_autolink_node!(uri, node)
return unless uri
return unless context[:emailable_links]
unencoded_uri_str = Addressable::URI.unencode(node['href'])
if unencoded_uri_str == node.content && idn?(uri)
node.content = uri.normalize
end
end
# escape any right-to-left (RTLO) characters in link text
def sanitize_link_text!(node)
node.inner_html = node.inner_html.gsub(RTLO, ENCODED_RTLO)
end
# If the domain is an international domain name (IDN),
# let's expose it with a tooltip in case it's intended
# to be malicious. This is particularly useful for links
# where the link text is not the same as the actual link.
# We will continue to show the unicode version of the domain
# in autolinked link text, which could contain emojis, etc.
#
# Also show the tooltip if the url contains the RTLO character,
# as this is an indicator of a malicious link
def add_malicious_tooltip!(uri, node)
if idn?(uri) || has_encoded_rtlo?(uri)
node.add_class('has-tooltip')
node.set_attribute('title', uri.normalize)
end
end
def add_nofollow!(uri, node)
if SCHEMES.include?(uri&.scheme)
node.set_attribute('rel', 'nofollow noreferrer noopener')
node.set_attribute('target', '_blank')
end
end
def idn?(uri)
uri&.normalized_host&.start_with?('xn--')
end
def has_encoded_rtlo?(uri)
uri&.to_s&.include?(ENCODED_RTLO)
end
end
end
end
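
For reference, a small sketch of the IDN check the filter relies on (domain taken from the IDN test TLD; output is what Addressable's IDNA handling is expected to produce):

```ruby
require 'addressable/uri'

uri = Addressable::URI.parse('http://例え.テスト/')
uri.normalized_host                      # => "xn--r8jz45g.xn--zckzah" (punycode form)
uri.normalized_host.start_with?('xn--')  # => true, so the filter adds the warning tooltip
```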

View file

@ -11,7 +11,8 @@ module Banzai
def self.transform_context(context)
super(context).merge(
only_path: false
only_path: false,
emailable_links: true
)
end
end

View file

@ -85,7 +85,7 @@ module Gitlab
user_id: user.id,
user_name: user.name,
user_username: user.username,
user_email: user.email,
user_email: user.public_email,
user_avatar: user.avatar_url(only_path: false),
project_id: project.id,
project: project.hook_attrs,

View file

@ -43,7 +43,7 @@ module Gitlab
raise ProjectNotFound unless author.can?(:read_project, project)
end
raise UserNotAuthorizedError unless author.can?(permission, project || noteable)
raise UserNotAuthorizedError unless author.can?(permission, try(:noteable) || project)
end
def verify_record!(record:, invalid_exception:, record_name:)

View file

@ -13,10 +13,12 @@ module Gitlab
@project = project
end
def lfs_download_object
LfsDownloadObject.new(oid: lfs_object.oid, size: lfs_object.size, link: lfs_object.link)
end
def execute
Projects::LfsPointers::LfsDownloadService
.new(project)
.execute(lfs_object.oid, lfs_object.download_link)
Projects::LfsPointers::LfsDownloadService.new(project, lfs_download_object).execute
end
end
end

View file

@ -9,11 +9,11 @@ module Gitlab
attr_reader :attributes
expose_attribute :oid, :download_link
expose_attribute :oid, :link, :size
# Builds a lfs_object
def self.from_api_response(lfs_object)
new({ oid: lfs_object[0], download_link: lfs_object[1] })
new({ oid: lfs_object.oid, link: lfs_object.link, size: lfs_object.size })
end
# Builds a new lfs_object using a Hash that was built from a JSON payload.

View file

@ -103,7 +103,7 @@ module Gitlab
def project_params
@project_params ||= begin
attrs = json_params.merge(override_params)
attrs = json_params.merge(override_params).merge(visibility_level)
# Cleaning all imported and overridden params
Gitlab::ImportExport::AttributeCleaner.clean(relation_hash: attrs,
@ -123,6 +123,13 @@ module Gitlab
end
end
def visibility_level
level = override_params['visibility_level'] || json_params['visibility_level'] || @project.visibility_level
level = @project.group.visibility_level if @project.group && level > @project.group.visibility_level
{ 'visibility_level' => level }
end
# Given a relation hash containing one or more models and its relationships,
# loops through each model and each object from a model type and
# and assigns its correspondent attributes hash from +tree_hash+

View file

@ -6,6 +6,7 @@ module Gitlab
def initialize(project)
@project = project
@errors = []
@logger = Gitlab::Import::Logger.build
end
def active_export_count
@ -21,19 +22,14 @@ module Gitlab
end
def error(error)
error_out(error.message, caller[0].dup)
log_error(message: error.message, caller: caller[0].dup)
log_debug(backtrace: error.backtrace&.join("\n"))
add_error_message(error.message)
# Debug:
if error.backtrace
Rails.logger.error("Import/Export backtrace: #{error.backtrace.join("\n")}")
else
Rails.logger.error("No backtrace found")
end
end
def add_error_message(error_message)
@errors << error_message
def add_error_message(message)
@errors << filtered_error_message(message)
end
def after_export_in_progress?
@ -50,8 +46,25 @@ module Gitlab
@project.disk_path
end
def error_out(message, caller)
Rails.logger.error("Import/Export error raised on #{caller}: #{message}")
def log_error(details)
@logger.error(log_base_data.merge(details))
end
def log_debug(details)
@logger.debug(log_base_data.merge(details))
end
def log_base_data
{
importer: 'Import/Export',
import_jid: @project&.import_state&.import_jid,
project_id: @project&.id,
project_path: @project&.full_path
}
end
def filtered_error_message(message)
Projects::ImportErrorFilter.filter_message(message)
end
def after_export_lock_file

View file

@ -125,7 +125,8 @@ module Gitlab
# allow non-regex validations, etc), `NAMESPACE_FORMAT_REGEX_JS` serves as a Javascript-compatible version of
# `NAMESPACE_FORMAT_REGEX`, with the negative lookbehind assertion removed. This means that the client-side validation
# will pass for usernames ending in `.atom` and `.git`, but will be caught by the server-side validation.
PATH_REGEX_STR = '[a-zA-Z0-9_\.][a-zA-Z0-9_\-\.]*'.freeze
PATH_START_CHAR = '[a-zA-Z0-9_\.]'.freeze
PATH_REGEX_STR = PATH_START_CHAR + '[a-zA-Z0-9_\-\.]*'.freeze
NAMESPACE_FORMAT_REGEX_JS = PATH_REGEX_STR + '[a-zA-Z0-9_\-]|[a-zA-Z0-9_]'.freeze
NO_SUFFIX_REGEX = /(?<!\.git|\.atom)/.freeze

lib/safe_zip/entry.rb (new file, 97 lines)
View file

@ -0,0 +1,97 @@
# frozen_string_literal: true
module SafeZip
class Entry
attr_reader :zip_archive, :zip_entry
attr_reader :path, :params
def initialize(zip_archive, zip_entry, params)
@zip_archive = zip_archive
@zip_entry = zip_entry
@params = params
@path = ::File.expand_path(zip_entry.name, params.extract_path)
end
def path_dir
::File.dirname(path)
end
def real_path_dir
::File.realpath(path_dir)
end
def exist?
::File.exist?(path)
end
def extract
# do not extract if file is not part of target directory
return false unless matching_target_directory
# do not overwrite existing file
raise SafeZip::Extract::AlreadyExistsError, "File already exists #{zip_entry.name}" if exist?
create_path_dir
if zip_entry.file?
extract_file
elsif zip_entry.directory?
extract_dir
elsif zip_entry.symlink?
extract_symlink
else
raise SafeZip::Extract::UnsupportedEntryError, "File #{zip_entry.name} cannot be extracted"
end
rescue SafeZip::Extract::Error
raise
rescue => e
raise SafeZip::Extract::ExtractError, e.message
end
private
def extract_file
zip_archive.extract(zip_entry, path)
end
def extract_dir
FileUtils.mkdir(path)
end
def extract_symlink
source_path = read_symlink
real_source_path = expand_symlink(source_path)
# ensure that source path of symlink is within target directories
unless real_source_path.start_with?(matching_target_directory)
raise SafeZip::Extract::PermissionDeniedError, "Symlink cannot be created targeting: #{source_path}"
end
::File.symlink(source_path, path)
end
def create_path_dir
# Create all directories, but ignore permissions
FileUtils.mkdir_p(path_dir)
# disallow path dirs that point to other directories
unless path_dir == real_path_dir
raise SafeZip::Extract::PermissionDeniedError, "Directory of #{zip_entry.name} points to another directory"
end
end
def matching_target_directory
params.matching_target_directory(path)
end
def read_symlink
zip_archive.read(zip_entry)
end
def expand_symlink(source_path)
::File.realpath(source_path, path_dir)
rescue
raise SafeZip::Extract::SymlinkSourceDoesNotExistError, "Symlink source #{source_path} does not exist"
end
end
end

lib/safe_zip/extract.rb (new file, 73 lines)
View file

@ -0,0 +1,73 @@
# frozen_string_literal: true
module SafeZip
class Extract
Error = Class.new(StandardError)
PermissionDeniedError = Class.new(Error)
SymlinkSourceDoesNotExistError = Class.new(Error)
UnsupportedEntryError = Class.new(Error)
AlreadyExistsError = Class.new(Error)
NoMatchingError = Class.new(Error)
ExtractError = Class.new(Error)
attr_reader :archive_path
def initialize(archive_file)
@archive_path = archive_file
end
def extract(opts = {})
params = SafeZip::ExtractParams.new(**opts)
if Feature.enabled?(:safezip_use_rubyzip, default_enabled: true)
extract_with_ruby_zip(params)
else
legacy_unsafe_extract_with_system_zip(params)
end
end
private
def extract_with_ruby_zip(params)
::Zip::File.open(archive_path) do |zip_archive|
# Extract all files in the following order:
# 1. Directories first,
# 2. Files next,
# 3. Symlinks last (or anything else)
extracted = extract_all_entries(zip_archive, params,
zip_archive.lazy.select(&:directory?))
extracted += extract_all_entries(zip_archive, params,
zip_archive.lazy.select(&:file?))
extracted += extract_all_entries(zip_archive, params,
zip_archive.lazy.reject(&:directory?).reject(&:file?))
raise NoMatchingError, 'No entries extracted' unless extracted > 0
end
end
def extract_all_entries(zip_archive, params, entries)
entries.count do |zip_entry|
SafeZip::Entry.new(zip_archive, zip_entry, params)
.extract
end
end
def legacy_unsafe_extract_with_system_zip(params)
# Requires UnZip at least 6.00 Info-ZIP.
# -n never overwrite existing files
args = %W(unzip -n -qq #{archive_path})
# We add * to the end of each directory, because we want to extract the directory and all its subdirectories
args += params.directories_wildcard
# Target directory where we extract
args += %W(-d #{params.extract_path})
unless system(*args)
raise Error, 'archive failed to extract'
end
end
end
end
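
Usage mirrors the call added to UpdatePagesService above; a minimal sketch with placeholder paths:

```ruby
SafeZip::Extract.new('/tmp/artifacts.zip')
  .extract(directories: ['public'], to: '/tmp/pages-extract')
# Raises SafeZip::Extract::NoMatchingError if nothing under public/ was
# extracted, and PermissionDeniedError for symlinks or directory entries
# that escape the target directory.
```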

View file

@ -0,0 +1,36 @@
# frozen_string_literal: true
module SafeZip
class ExtractParams
include Gitlab::Utils::StrongMemoize
attr_reader :directories, :extract_path
def initialize(directories:, to:)
@directories = directories
@extract_path = ::File.realpath(to)
end
def matching_target_directory(path)
target_directories.find do |directory|
path.start_with?(directory)
end
end
def target_directories
strong_memoize(:target_directories) do
directories.map do |directory|
::File.join(::File.expand_path(directory, extract_path), '')
end
end
end
def directories_wildcard
strong_memoize(:directories_wildcard) do
directories.map do |directory|
::File.join(directory, '*')
end
end
end
end
end
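
A small behavioural sketch of the params object above (illustrative paths; `to:` must point at an existing directory because the constructor calls File.realpath on it). The trailing separator added by target_directories is what keeps a similarly named sibling such as public_html from passing the prefix match, and directories_wildcard is what feeds the legacy unzip invocation.

params = SafeZip::ExtractParams.new(directories: %w(public), to: '/srv/pages/site')

params.target_directories
# => ["/srv/pages/site/public/"]   (note the trailing separator)

params.matching_target_directory('/srv/pages/site/public/index.html')
# => "/srv/pages/site/public/"

params.matching_target_directory('/srv/pages/site/public_html/index.html')
# => nil   (a similarly named sibling does not match)

params.directories_wildcard
# => ["public/*"]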

View file

@ -2780,6 +2780,9 @@ msgstr ""
msgid "Explore public groups"
msgstr ""
msgid "External Wiki"
msgstr ""
msgid "Facebook"
msgstr ""

View file

@ -68,7 +68,7 @@
"js-cookie": "^2.1.3",
"jszip": "^3.1.3",
"jszip-utils": "^0.0.2",
"katex": "^0.9.0",
"katex": "^0.10.0",
"marked": "^0.3.12",
"mermaid": "^8.0.0-rc.8",
"monaco-editor": "^0.14.3",

View file

@ -8,6 +8,7 @@ describe Import::BitbucketController do
let(:secret) { "sekrettt" }
let(:refresh_token) { SecureRandom.hex(15) }
let(:access_params) { { token: token, expires_at: nil, expires_in: nil, refresh_token: nil } }
let(:code) { SecureRandom.hex(8) }
def assign_session_tokens
session[:bitbucket_token] = token
@ -32,10 +33,16 @@ describe Import::BitbucketController do
expires_in: expires_in,
refresh_token: refresh_token)
allow_any_instance_of(OAuth2::Client)
.to receive(:get_token).and_return(access_token)
.to receive(:get_token)
.with(hash_including(
'grant_type' => 'authorization_code',
'code' => code,
redirect_uri: users_import_bitbucket_callback_url),
{})
.and_return(access_token)
stub_omniauth_provider('bitbucket')
get :callback
get :callback, code: code
expect(session[:bitbucket_token]).to eq(token)
expect(session[:bitbucket_refresh_token]).to eq(refresh_token)

View file

@ -12,9 +12,15 @@ describe Import::GithubController do
it "redirects to GitHub for an access token if logged in with GitHub" do
allow(controller).to receive(:logged_in_with_provider?).and_return(true)
expect(controller).to receive(:go_to_provider_for_permissions)
expect(controller).to receive(:go_to_provider_for_permissions).and_call_original
allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
.to receive(:authorize_url)
.with(users_import_github_callback_url)
.and_call_original
get :new
expect(response).to have_http_status(302)
end
end

View file

@ -121,7 +121,7 @@ describe Projects::IssuesController do
it 'redirects to signin if not logged in' do
get :new, namespace_id: project.namespace, project_id: project
expect(flash[:notice]).to eq 'Please sign in to create the new issue.'
expect(flash[:alert]).to eq 'You need to sign in or sign up before continuing.'
expect(response).to redirect_to(new_user_session_path)
end

View file

@ -3,9 +3,14 @@ require 'spec_helper'
describe Projects::PipelineSchedulesController do
include AccessMatchersForController
set(:user) { create(:user) }
set(:project) { create(:project, :public, :repository) }
set(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
before do
project.add_developer(user)
end
describe 'GET #index' do
render_views
@ -14,6 +19,10 @@ describe Projects::PipelineSchedulesController do
create(:ci_pipeline_schedule, :inactive, project: project)
end
before do
sign_in(user)
end
it 'renders the index view' do
visit_pipelines_schedules
@ -21,7 +30,7 @@ describe Projects::PipelineSchedulesController do
expect(response).to render_template(:index)
end
it 'avoids N + 1 queries' do
it 'avoids N + 1 queries', :request_store do
control_count = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }.count
create_list(:ci_pipeline_schedule, 2, project: project)

View file

@ -5,7 +5,7 @@ describe Projects::PipelinesController do
set(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:feature) { ProjectFeature::DISABLED }
let(:feature) { ProjectFeature::ENABLED }
before do
stub_not_protect_default_branch
@ -184,6 +184,27 @@ describe Projects::PipelinesController do
end
end
context 'when builds are disabled' do
let(:feature) { ProjectFeature::DISABLED }
it 'users can not see internal pipelines' do
get_pipeline_json
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when pipeline is external' do
let(:pipeline) { create(:ci_pipeline, source: :external, project: project) }
it 'users can see the external pipeline' do
get_pipeline_json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to be(pipeline.id)
end
end
end
def get_pipeline_json
get :show, namespace_id: project.namespace, project_id: project, id: pipeline, format: :json
end
@ -316,16 +337,14 @@ describe Projects::PipelinesController do
format: :json
end
context 'when builds are enabled' do
let(:feature) { ProjectFeature::ENABLED }
it 'retries a pipeline without returning any content' do
expect(response).to have_gitlab_http_status(:no_content)
expect(build.reload).to be_retried
end
end
context 'when builds are disabled' do
let(:feature) { ProjectFeature::DISABLED }
it 'fails to retry pipeline' do
expect(response).to have_gitlab_http_status(:not_found)
end
@ -343,16 +362,14 @@ describe Projects::PipelinesController do
format: :json
end
context 'when builds are enabled' do
let(:feature) { ProjectFeature::ENABLED }
it 'cancels a pipeline without returning any content' do
expect(response).to have_gitlab_http_status(:no_content)
expect(pipeline.reload).to be_canceled
end
end
context 'when builds are disabled' do
let(:feature) { ProjectFeature::DISABLED }
it 'fails to retry pipeline' do
expect(response).to have_gitlab_http_status(:not_found)
end

View file

@ -206,6 +206,38 @@ describe UsersController do
end
end
describe 'GET #contributed' do
let(:project) { create(:project, :public) }
let(:current_user) { create(:user) }
before do
sign_in(current_user)
project.add_developer(public_user)
project.add_developer(private_user)
end
context 'with public profile' do
it 'renders contributed projects' do
create(:push_event, project: project, author: public_user)
get :contributed, username: public_user.username
expect(assigns[:contributed_projects]).not_to be_empty
end
end
context 'with private profile' do
it 'does not render contributed projects' do
create(:push_event, project: project, author: private_user)
get :contributed, username: private_user.username
expect(assigns[:contributed_projects]).to be_empty
end
end
end
describe 'GET #snippets' do
before do
sign_in(user)

View file

@ -144,6 +144,27 @@ describe 'Dashboard Projects' do
expect(page).to have_link('Commit: passed')
end
end
context 'guest user of project and project has private pipelines' do
let(:guest_user) { create(:user) }
before do
project.update(public_builds: false)
project.add_guest(guest_user)
sign_in(guest_user)
end
it 'does not show the pipeline status' do
visit dashboard_projects_path
page.within('.controls') do
expect(page).not_to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
expect(page).not_to have_css('.ci-status-link')
expect(page).not_to have_css('.ci-status-icon-success')
expect(page).not_to have_link('Commit: passed')
end
end
end
end
context 'last push widget', :use_clean_rails_memory_store_caching do

View file

@ -1,6 +1,8 @@
require 'spec_helper'
describe 'Math rendering', :js do
let!(:project) { create(:project, :public) }
it 'renders inline and display math correctly' do
description = <<~MATH
This math is inline $`a^2+b^2=c^2`$.
@ -11,12 +13,26 @@ describe 'Math rendering', :js do
```
MATH
project = create(:project, :public)
issue = create(:issue, project: project, description: description)
visit project_issue_path(project, issue)
expect(page).to have_selector('.katex .mord.mathit', text: 'b')
expect(page).to have_selector('.katex-display .mord.mathit', text: 'b')
expect(page).to have_selector('.katex .mord.mathdefault', text: 'b')
expect(page).to have_selector('.katex-display .mord.mathdefault', text: 'b')
end
it 'only renders non XSS links' do
description = <<~MATH
This link is invalid $`\\href{javascript:alert('xss');}{xss}`$.
This link is valid $`\\href{https://gitlab.com}{Gitlab}`$.
MATH
issue = create(:issue, project: project, description: description)
visit project_issue_path(project, issue)
expect(page).to have_selector('.katex-error', text: "\href{javascript:alert('xss');}{xss}")
expect(page).to have_selector('.katex-html a', text: 'Gitlab')
end
end

View file

@ -15,6 +15,9 @@ describe 'Projects > Settings > User changes default branch' do
let(:project) { create(:project, :repository, namespace: user.namespace) }
it 'allows to change the default branch', :js do
# Otherwise, running JS may overwrite our change to project_default_branch
wait_for_requests
select2('fix', from: '#project_default_branch')
page.within '#default-branch-settings' do

View file

@ -452,9 +452,9 @@ describe "Internal Project Access" do
it { is_expected.to be_allowed_for(:owner).of(project) }
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
it { is_expected.to be_allowed_for(:guest).of(project) }
it { is_expected.to be_allowed_for(:user) }
it { is_expected.to be_denied_for(:reporter).of(project) }
it { is_expected.to be_denied_for(:guest).of(project) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
end

View file

@ -485,7 +485,7 @@ describe "Private Project Access" do
it { is_expected.to be_allowed_for(:owner).of(project) }
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
it { is_expected.to be_denied_for(:reporter).of(project) }
it { is_expected.to be_denied_for(:guest).of(project) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }

View file

@ -272,11 +272,11 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:owner).of(project) }
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
it { is_expected.to be_allowed_for(:guest).of(project) }
it { is_expected.to be_allowed_for(:user) }
it { is_expected.to be_allowed_for(:external) }
it { is_expected.to be_allowed_for(:visitor) }
it { is_expected.to be_denied_for(:reporter).of(project) }
it { is_expected.to be_denied_for(:guest).of(project) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
end
describe "GET /:project_path/environments" do

View file

@ -31,4 +31,16 @@ describe ContributedProjectsFinder do
it { is_expected.to match_array([private_project, internal_project, public_project]) }
end
context 'user with private profile' do
it 'does not return contributed projects' do
private_user = create(:user, private_profile: true)
public_project.add_maintainer(private_user)
create(:push_event, project: public_project, author: private_user)
projects = described_class.new(private_user).execute(current_user)
expect(projects).to be_empty
end
end
end

BIN
spec/fixtures/pages_non_writeable.zip vendored Normal file

Binary file not shown.

BIN
spec/fixtures/safe_zip/valid-simple.zip vendored Normal file

Binary file not shown.

Some files were not shown because too many files have changed in this diff.