Merge tag 'debian/13.2.5-1' into buster-fasttrack

gitlab Debian release 13.2.5-1
Pirate Praveen 2020-08-19 00:45:04 +05:30
commit 463b215bb5
60 changed files with 950 additions and 346 deletions


@ -2,10 +2,39 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 13.2.3 (2020-08-05)
## 13.2.5 (2020-08-17)
- No changes.
## 13.2.4 (2020-08-11)
### Security (1 change)
- Add decompressed archive size validation on Project/Group Import. !38736
### Fixed (1 change)
- Fix automatic issue creation via Prometheus alerts. !37884
## 13.2.3 (2020-08-05)
### Security (12 changes)
- Update kramdown gem to version 2.3.0.
- Enforce 2FA on Doorkeeper controllers.
- Revoke OAuth grants when a user revokes an application.
- Refresh project authorizations when transferring groups.
- Stop excess logs from failure to send invite email when group no longer exists.
- Verify confirmed email for OAuth Authorize POST endpoint.
- Fix XSS in Markdown reference tooltips.
- Fix XSS in milestone tooltips.
- Fix xss vulnerability on jobs view.
- Block 40-character hexadecimal branches.
- Prevent a temporary access escalation before group memberships are recalculated when specialized project share workers are enabled.
- Update GitLab Runner Helm Chart to 0.18.2.
## 13.2.2 (2020-07-29)
### Fixed (3 changes)


@ -1 +1 @@
13.2.3
13.2.5


@ -142,7 +142,7 @@ gem 'deckar01-task_list', '2.3.1'
gem 'gitlab-markup', '~> 1.7.1'
gem 'github-markup', '~> 1.7.0', require: 'github/markup'
gem 'commonmarker', '~> 0.20'
gem 'kramdown', '~> 2.2.1'
gem 'kramdown', '~> 2.3.0'
gem 'RedCloth', '~> 4.3.2'
gem 'rdoc', '~> 6.1.2'
gem 'org-ruby', '~> 0.9.12'


@ -589,7 +589,7 @@ GEM
kgio (2.11.3)
knapsack (1.17.0)
rake
kramdown (2.2.1)
kramdown (2.3.0)
rexml
kramdown-parser-gfm (1.1.0)
kramdown (~> 2.0)
@ -1297,7 +1297,7 @@ DEPENDENCIES
jwt (~> 2.1.0)
kaminari (~> 1.0)
knapsack (~> 1.17)
kramdown (~> 2.2.1)
kramdown (~> 2.3.0)
kubeclient (~> 4.6.0)
letter_opener_web (~> 1.3.4)
license_finder (~> 5.4)


@ -1 +1 @@
13.2.3
13.2.5


@ -1,11 +1,15 @@
<script>
import { escape, isEmpty } from 'lodash';
import { isEmpty } from 'lodash';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { sprintf, __ } from '../../locale';
import { __ } from '../../locale';
import { GlSprintf, GlLink } from '@gitlab/ui';
export default {
creatingEnvironment: 'creating',
components: {
CiIcon,
GlSprintf,
GlLink,
},
props: {
deploymentStatus: {
@ -31,7 +35,7 @@ export default {
return this.outOfDateEnvironmentMessage();
case 'failed':
return this.failedEnvironmentMessage();
case 'creating':
case this.$options.creatingEnvironment:
return this.creatingEnvironmentMessage();
default:
return '';
@ -39,17 +43,12 @@ export default {
},
environmentLink() {
if (this.hasEnvironment) {
return sprintf(
'%{startLink}%{name}%{endLink}',
{
startLink: `<a href="${this.deploymentStatus.environment.environment_path}" class="js-environment-link">`,
name: escape(this.deploymentStatus.environment.name),
endLink: '</a>',
},
false,
);
return {
link: this.deploymentStatus.environment.environment_path,
name: this.deploymentStatus.environment.name,
};
}
return '';
return {};
},
hasLastDeployment() {
return this.hasEnvironment && this.deploymentStatus.environment.last_deployment;
@ -74,201 +73,107 @@ export default {
}
const { name, path } = this.deploymentCluster;
const escapedName = escape(name);
const escapedPath = escape(path);
if (!escapedPath) {
return escapedName;
}
return sprintf(
'%{startLink}%{name}%{endLink}',
{
startLink: `<a href="${escapedPath}" class="js-job-cluster-link">`,
name: escapedName,
endLink: '</a>',
},
false,
);
return {
path,
name,
};
},
kubernetesNamespace() {
return this.hasCluster ? this.deploymentCluster.kubernetes_namespace : null;
},
deploymentLink() {
return {
path: this.lastDeploymentPath,
name:
this.deploymentStatus.status === this.$options.creatingEnvironment
? __('latest deployment')
: __('most recent deployment'),
};
},
},
methods: {
deploymentLink(name) {
return sprintf(
'%{startLink}%{name}%{endLink}',
{
startLink: `<a href="${this.lastDeploymentPath}" class="js-job-deployment-link">`,
name,
endLink: '</a>',
},
false,
);
},
failedEnvironmentMessage() {
const { environmentLink } = this;
return sprintf(
__('The deployment of this job to %{environmentLink} did not succeed.'),
{ environmentLink },
false,
);
return __('The deployment of this job to %{environmentLink} did not succeed.');
},
lastEnvironmentMessage() {
const { environmentLink, clusterNameOrLink, hasCluster, kubernetesNamespace } = this;
if (hasCluster) {
if (kubernetesNamespace) {
return sprintf(
__(
'This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
),
{ environmentLink, clusterNameOrLink, kubernetesNamespace },
false,
if (this.hasCluster) {
if (this.kubernetesNamespace) {
return __(
'This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
);
}
// we know the cluster but not the namespace
return sprintf(
__('This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink}.'),
{ environmentLink, clusterNameOrLink },
false,
);
return __('This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink}.');
}
// not a cluster deployment
return sprintf(__('This job is deployed to %{environmentLink}.'), { environmentLink }, false);
return __('This job is deployed to %{environmentLink}.');
},
outOfDateEnvironmentMessage() {
const {
hasLastDeployment,
hasCluster,
environmentLink,
clusterNameOrLink,
kubernetesNamespace,
} = this;
if (hasLastDeployment) {
const deploymentLink = this.deploymentLink(__('most recent deployment'));
if (hasCluster) {
if (kubernetesNamespace) {
return sprintf(
__(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. View the %{deploymentLink}.',
),
{ environmentLink, clusterNameOrLink, kubernetesNamespace, deploymentLink },
false,
if (this.hasLastDeployment) {
if (this.hasCluster) {
if (this.kubernetesNamespace) {
return __(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. View the %{deploymentLink}.',
);
}
// we know the cluster but not the namespace
return sprintf(
__(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}. View the %{deploymentLink}.',
),
{ environmentLink, clusterNameOrLink, deploymentLink },
false,
return __(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}. View the %{deploymentLink}.',
);
}
// not a cluster deployment
return sprintf(
__(
'This job is an out-of-date deployment to %{environmentLink}. View the %{deploymentLink}.',
),
{ environmentLink, deploymentLink },
false,
return __(
'This job is an out-of-date deployment to %{environmentLink}. View the %{deploymentLink}.',
);
}
// no last deployment, i.e. this is the first deployment
if (hasCluster) {
if (kubernetesNamespace) {
return sprintf(
__(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
),
{ environmentLink, clusterNameOrLink, kubernetesNamespace },
false,
if (this.hasCluster) {
if (this.kubernetesNamespace) {
return __(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
);
}
// we know the cluster but not the namespace
return sprintf(
__(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
),
{ environmentLink, clusterNameOrLink },
false,
return __(
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
);
}
// not a cluster deployment
return sprintf(
__('This job is an out-of-date deployment to %{environmentLink}.'),
{ environmentLink },
false,
);
return __('This job is an out-of-date deployment to %{environmentLink}.');
},
creatingEnvironmentMessage() {
const {
hasLastDeployment,
hasCluster,
environmentLink,
clusterNameOrLink,
kubernetesNamespace,
} = this;
if (hasLastDeployment) {
const deploymentLink = this.deploymentLink(__('latest deployment'));
if (hasCluster) {
if (kubernetesNamespace) {
return sprintf(
__(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. This will overwrite the %{deploymentLink}.',
),
{ environmentLink, clusterNameOrLink, kubernetesNamespace, deploymentLink },
false,
if (this.hasLastDeployment) {
if (this.hasCluster) {
if (this.kubernetesNamespace) {
return __(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. This will overwrite the %{deploymentLink}.',
);
}
// we know the cluster but not the namespace
return sprintf(
__(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}. This will overwrite the %{deploymentLink}.',
),
{ environmentLink, clusterNameOrLink, deploymentLink },
false,
return __(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}. This will overwrite the %{deploymentLink}.',
);
}
// not a cluster deployment
return sprintf(
__(
'This job is creating a deployment to %{environmentLink}. This will overwrite the %{deploymentLink}.',
),
{ environmentLink, deploymentLink },
false,
return __(
'This job is creating a deployment to %{environmentLink}. This will overwrite the %{deploymentLink}.',
);
}
// no last deployment, i.e. this is the first deployment
if (hasCluster) {
if (kubernetesNamespace) {
return sprintf(
__(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
),
{ environmentLink, clusterNameOrLink, kubernetesNamespace },
false,
if (this.hasCluster) {
if (this.kubernetesNamespace) {
return __(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
);
}
// we know the cluster but not the namespace
return sprintf(
__(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
),
{ environmentLink, clusterNameOrLink },
false,
return __(
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
);
}
// not a cluster deployment
return sprintf(
__('This job is creating a deployment to %{environmentLink}.'),
{ environmentLink },
false,
);
return __('This job is creating a deployment to %{environmentLink}.');
},
},
};
@ -277,7 +182,37 @@ export default {
<div class="gl-mt-3 gl-mb-3 js-environment-container">
<div class="environment-information">
<ci-icon :status="iconStatus" />
<p class="inline gl-mb-0" v-html="environment"></p>
<p class="inline gl-mb-0">
<gl-sprintf :message="environment">
<template #environmentLink>
<gl-link
v-if="hasEnvironment"
:href="environmentLink.link"
data-testid="job-environment-link"
v-text="environmentLink.name"
/>
</template>
<template #clusterNameOrLink>
<gl-link
v-if="clusterNameOrLink.path"
:href="clusterNameOrLink.path"
data-testid="job-cluster-link"
v-text="clusterNameOrLink.name"
/>
<template v-else>{{ clusterNameOrLink.name }}</template>
</template>
<template #kubernetesNamespace>
<template>{{ kubernetesNamespace }}</template>
</template>
<template #deploymentLink>
<gl-link
:href="deploymentLink.path"
data-testid="job-deployment-link"
v-text="deploymentLink.name"
/>
</template>
</gl-sprintf>
</p>
</div>
</div>
</template>


@ -12,10 +12,17 @@ module EnforcesTwoFactorAuthentication
included do
before_action :check_two_factor_requirement
helper_method :two_factor_grace_period_expired?, :two_factor_skippable?
# to include this in controllers inheriting from `ActionController::Metal`
# we need to add this block
if respond_to?(:helper_method)
helper_method :two_factor_grace_period_expired?, :two_factor_skippable?
end
end
def check_two_factor_requirement
return unless respond_to?(:current_user)
if two_factor_authentication_required? && current_user_requires_two_factor?
redirect_to profile_two_factor_auth_path
end
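The `respond_to?(:helper_method)` guard matters because this concern is now mixed into Doorkeeper's token endpoints, which are built on `ActionController::Metal` and have no view-helper layer. A minimal check of that distinction, assuming a standard Rails setup (not part of the patch itself):

```ruby
require 'action_controller'

# `helper_method` comes from AbstractController::Helpers, which
# ActionController::Base includes but bare ActionController::Metal does not.
ActionController::Base.respond_to?(:helper_method)  # => true
ActionController::Metal.respond_to?(:helper_method) # => false
```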


@ -2,7 +2,6 @@
class Oauth::ApplicationsController < Doorkeeper::ApplicationsController
include Gitlab::GonHelper
include Gitlab::Allowable
include PageLayoutHelper
include OauthApplications
include Gitlab::Experimentation::ControllerConcern
@ -19,8 +18,6 @@ class Oauth::ApplicationsController < Doorkeeper::ApplicationsController
around_action :set_locale
helper_method :can?
layout 'profile'
def index


@ -4,7 +4,7 @@ class Oauth::AuthorizationsController < Doorkeeper::AuthorizationsController
include Gitlab::Experimentation::ControllerConcern
include InitializesCurrentUserMode
before_action :verify_confirmed_email!, only: [:new]
before_action :verify_confirmed_email!
layout 'profile'


@ -16,7 +16,7 @@ class Oauth::AuthorizedApplicationsController < Doorkeeper::AuthorizedApplicatio
if params[:token_id].present?
current_resource_owner.oauth_authorized_tokens.find(params[:token_id]).revoke
else
Doorkeeper::AccessToken.revoke_all_for(params[:id], current_resource_owner)
Doorkeeper::Application.revoke_tokens_and_grants_for(params[:id], current_resource_owner)
end
redirect_to applications_profile_url,
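Swapping `Doorkeeper::AccessToken.revoke_all_for` for `Doorkeeper::Application.revoke_tokens_and_grants_for` is the "Revoke OAuth grants when a user revokes an application" fix from the changelog: the old call revoked only access tokens and left authorization grants untouched. A rough sketch of the difference (both calls appear in this diff; the variables are illustrative):

```ruby
# Before: only the user's access tokens for the application are revoked.
Doorkeeper::AccessToken.revoke_all_for(application.id, current_resource_owner)

# After: outstanding access grants are revoked along with the tokens, so a
# still-valid authorization code cannot be exchanged for a fresh token.
Doorkeeper::Application.revoke_tokens_and_grants_for(application.id, current_resource_owner)
```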


@ -1,6 +1,8 @@
# frozen_string_literal: true
class Oauth::TokenInfoController < Doorkeeper::TokenInfoController
include EnforcesTwoFactorAuthentication
def show
if doorkeeper_token && doorkeeper_token.accessible?
token_json = doorkeeper_token.as_json


@ -0,0 +1,5 @@
# frozen_string_literal: true
class Oauth::TokensController < Doorkeeper::TokensController
include EnforcesTwoFactorAuthentication
end


@ -29,7 +29,7 @@ module IssuablesHelper
def sidebar_milestone_tooltip_label(milestone)
return _('Milestone') unless milestone.present?
[milestone[:title], sidebar_milestone_remaining_days(milestone) || _('Milestone')].join('<br/>')
[escape_once(milestone[:title]), sidebar_milestone_remaining_days(milestone) || _('Milestone')].join('<br/>')
end
def sidebar_milestone_remaining_days(milestone)
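The fix runs the user-controlled milestone title through `escape_once` before it is joined into tooltip HTML (the `<br/>` separator is intentional markup). `escape_once` encodes HTML metacharacters without double-escaping entities that are already encoded; a quick console sketch of the assumed behaviour:

```ruby
# ERB::Util.html_escape_once backs the `escape_once` view helper.
ERB::Util.html_escape_once('<img onerror=alert(1)>')
# => "&lt;img onerror=alert(1)&gt;"

ERB::Util.html_escape_once('&lt;already escaped&gt;')
# => "&lt;already escaped&gt;"   (existing entities are left as-is)
```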


@ -13,6 +13,8 @@ module Emails
@member_source_type = member_source_type
@member_id = member_id
return unless member_exists?
user = User.find(recipient_id)
member_email_with_layout(
@ -24,6 +26,8 @@ module Emails
@member_source_type = member_source_type
@member_id = member_id
return unless member_exists?
member_email_with_layout(
to: member.user.notification_email_for(notification_group),
subject: subject("Access to the #{member_source.human_name} #{member_source.model_name.singular} was granted"))
@ -45,6 +49,8 @@ module Emails
@member_id = member_id
@token = token
return unless member_exists?
member_email_with_layout(
to: member.invite_email,
subject: subject("Invitation to join the #{member_source.human_name} #{member_source.model_name.singular}"))
@ -53,6 +59,8 @@ module Emails
def member_invite_accepted_email(member_source_type, member_id)
@member_source_type = member_source_type
@member_id = member_id
return unless member_exists?
return unless member.created_by
member_email_with_layout(
@ -74,9 +82,11 @@ module Emails
subject: subject('Invitation declined'))
end
# rubocop: disable CodeReuse/ActiveRecord
def member
@member ||= Member.find(@member_id)
@member ||= Member.find_by(id: @member_id)
end
# rubocop: enable CodeReuse/ActiveRecord
def member_source
@member_source ||= member.source
@ -88,6 +98,11 @@ module Emails
private
def member_exists?
Gitlab::AppLogger.info("Tried to send an email invitation for a deleted group. Member id: #{@member_id}") if member.blank?
member.present?
end
def member_source_class
@member_source_type.classify.constantize
end
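Together, `find_by` and the new `member_exists?` guard let these mailers bail out quietly instead of raising when the membership (or its group) was deleted before the job ran. A sketch of the difference, with `deleted_id` standing in for an id that no longer exists:

```ruby
deleted_id = 42 # hypothetical id of a membership removed before the mailer job ran

Member.find(deleted_id)        # raises ActiveRecord::RecordNotFound and fails the job
Member.find_by(id: deleted_id) # => nil, so member_exists? logs the miss and the
                               #    mailer returns an ActionMailer::Base::NullMail
```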


@ -3,7 +3,7 @@
module Clusters
module Applications
class Runner < ApplicationRecord
VERSION = '0.18.1'
VERSION = '0.18.2'
self.table_name = 'clusters_applications_runners'


@ -34,6 +34,8 @@ module AlertManagement
else
create_alert_management_alert
end
process_incident_alert
end
def reset_alert_management_alert_status
@ -47,16 +49,17 @@ module AlertManagement
end
def create_alert_management_alert
am_alert = AlertManagement::Alert.new(am_alert_params.merge(ended_at: nil))
if am_alert.save
am_alert.execute_services
new_alert = AlertManagement::Alert.new(am_alert_params.merge(ended_at: nil))
if new_alert.save
new_alert.execute_services
@am_alert = new_alert
return
end
logger.warn(
message: 'Unable to create AlertManagement::Alert',
project_id: project.id,
alert_errors: am_alert.errors.messages
alert_errors: new_alert.errors.messages
)
end
@ -89,12 +92,21 @@ module AlertManagement
SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
end
def process_incident_alert
return unless am_alert
return if am_alert.issue
IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, am_alert.id)
end
def logger
@logger ||= Gitlab::AppLogger
end
def am_alert
@am_alert ||= AlertManagement::Alert.not_resolved.for_fingerprint(project, gitlab_fingerprint).first
strong_memoize(:am_alert) do
AlertManagement::Alert.not_resolved.for_fingerprint(project, gitlab_fingerprint).first
end
end
def bad_request
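Two things change here: the freshly created alert is remembered in `@am_alert` so `process_incident_alert` can enqueue the incident worker for it, and the lookup switches from `||=` to `strong_memoize`. The latter matters because `||=` re-runs the query on every call whenever no alert is found, while `strong_memoize` caches a nil result. A minimal sketch of the idiom (names are illustrative):

```ruby
include Gitlab::Utils::StrongMemoize

def existing_alert
  # `@existing_alert ||= expensive_lookup` would repeat the lookup on every call
  # when it returns nil; strong_memoize stores nil as a valid cached result.
  strong_memoize(:existing_alert) { expensive_lookup }
end
```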


@ -6,9 +6,10 @@ module AuthorizedProjectUpdate
BATCH_SIZE = 1000
def initialize(project, group)
def initialize(project, group, group_access = nil)
@project = project
@group = group
@group_access = group_access
end
def execute
@ -19,19 +20,20 @@ module AuthorizedProjectUpdate
user_ids_to_delete = []
members.each do |member|
new_access_level = access_level(member.access_level)
existing_access_level = existing_authorizations[member.user_id]
if existing_access_level
# User might already have access to the project unrelated to the
# current project share
next if existing_access_level >= member.access_level
next if existing_access_level >= new_access_level
user_ids_to_delete << member.user_id
end
authorizations_to_create << { user_id: member.user_id,
project_id: project.id,
access_level: member.access_level }
access_level: new_access_level }
end
update_authorizations(user_ids_to_delete, authorizations_to_create)
@ -42,7 +44,15 @@ module AuthorizedProjectUpdate
private
attr_reader :project, :group
attr_reader :project, :group, :group_access
def access_level(membership_access_level)
return membership_access_level unless group_access
# access level must not be higher than the max access level set when
# creating the project share
[membership_access_level, group_access].min
end
def existing_project_authorizations(members)
user_ids = members.map(&:user_id)
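The new `group_access` parameter caps each written authorization at the maximum access level chosen for the project share, closing the temporary escalation window mentioned in the changelog. A worked example with hypothetical values:

```ruby
# A member who is MAINTAINER (40) in the invited group, shared into the
# project with a maximum of REPORTER (20) access:
[Gitlab::Access::MAINTAINER, Gitlab::Access::REPORTER].min
# => 20, so the ProjectAuthorization row never exceeds the share's cap
```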


@ -37,6 +37,7 @@ module Groups
# Overridden in EE
def post_update_hooks(updated_project_ids)
refresh_project_authorizations
end
def ensure_allowed_transfer
@ -121,6 +122,16 @@ module Groups
@group.add_owner(current_user)
end
def refresh_project_authorizations
ProjectAuthorization.where(project_id: @group.all_projects.select(:id)).delete_all # rubocop: disable CodeReuse/ActiveRecord
# refresh authorized projects for current_user immediately
current_user.refresh_authorized_projects
# schedule refreshing projects for all the members of the group
@group.refresh_members_authorized_projects
end
def raise_transfer_error(message)
raise TransferError, localized_error_messages[message]
end


@ -13,7 +13,7 @@ module Projects
)
if link.save
setup_authorizations(group)
setup_authorizations(group, link.group_access)
success(link: link)
else
error(link.errors.full_messages.to_sentence, 409)
@ -22,9 +22,10 @@ module Projects
private
def setup_authorizations(group)
def setup_authorizations(group, group_access = nil)
if Feature.enabled?(:specialized_project_authorization_project_share_worker)
AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(project.id, group.id)
AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(
project.id, group.id, group_access)
# AuthorizedProjectsWorker uses an exclusive lease per user but
# specialized workers might have synchronization issues. Until we


@ -10,12 +10,13 @@ module AuthorizedProjectUpdate
idempotent!
def perform(project_id, group_id)
def perform(project_id, group_id, group_access = nil)
project = Project.find(project_id)
group = Group.find(group_id)
AuthorizedProjectUpdate::ProjectGroupLinkCreateService.new(project, group)
.execute
AuthorizedProjectUpdate::ProjectGroupLinkCreateService
.new(project, group, group_access)
.execute
end
end
end


@ -16,6 +16,9 @@ class AuthorizedProjectsWorker
if Rails.env.test?
def self.bulk_perform_and_wait(args_list, timeout: 10)
end
def self.bulk_perform_inline(args_list)
end
end
# rubocop: disable CodeReuse/ActiveRecord


@ -29,7 +29,11 @@ module IncidentManagement
end
def parsed_payload(alert)
Gitlab::Alerting::NotificationPayloadParser.call(alert.payload.to_h, alert.project)
if alert.prometheus?
alert.payload
else
Gitlab::Alerting::NotificationPayloadParser.call(alert.payload.to_h, alert.project)
end
end
def create_issue_for(alert)
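The change passes a Prometheus alert's stored payload straight through instead of running it back through `NotificationPayloadParser`, which targets generic alert payloads. The assumed shape of a stored Prometheus payload, mirroring the factory added further down (the timestamp is illustrative):

```ruby
{
  'annotations' => {
    'title'       => 'This is a prometheus error',
    'summary'     => 'Summary of the error',
    'description' => 'Description of the error'
  },
  'startsAt' => '2020-08-17T00:00:00Z'
}
```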


@ -25,7 +25,8 @@ Rails.application.routes.draw do
controllers applications: 'oauth/applications',
authorized_applications: 'oauth/authorized_applications',
authorizations: 'oauth/authorizations',
token_info: 'oauth/token_info'
token_info: 'oauth/token_info',
tokens: 'oauth/tokens'
end
# This prefixless path is required because Jira gets confused if we set it up with a path

debian/changelog

@ -1,3 +1,12 @@
gitlab (13.2.5-1) unstable; urgency=medium
* Bump minimum version of webpack to 4.43
* New upstream version 13.2.5 with a security fix (CVE ID not assigned yet)
* Refresh patches
* Relax dependency on ruby-browser
-- Pirate Praveen <praveen@debian.org> Tue, 18 Aug 2020 20:03:24 +0530
gitlab (13.2.3-2+fto10+1) buster-fasttrack; urgency=medium
* Rebuild for buster-fasttrack.

debian/control

@ -89,7 +89,7 @@ Depends: ${shlibs:Depends}, ${misc:Depends},
# GitLab Pages letsencrypt support
ruby-acme-client (>= 2.0.6~),
# Browser detection
ruby-browser (>= 2.5~),
ruby-browser (>= 4.2~),
# GPG
ruby-gpgme (>= 2.0.19~),
# LDAP Auth


@ -162,10 +162,10 @@ gitlab Gemfile
+gem 'gitlab-markup', '~> 1.7', '>= 1.7.1'
+gem 'github-markup', '~> 1.7', require: 'github/markup'
gem 'commonmarker', '~> 0.20'
-gem 'kramdown', '~> 2.2.1'
-gem 'kramdown', '~> 2.3.0'
-gem 'RedCloth', '~> 4.3.2'
-gem 'rdoc', '~> 6.1.2'
+gem 'kramdown', '~> 2.2', '>= 2.2.1'
+gem 'kramdown', '~> 2.3'
+gem 'RedCloth', '~> 4.3', '>= 4.3.2'
+gem 'rdoc', '~> 6.1', '>= 6.1.2'
gem 'org-ruby', '~> 0.9.12'


@ -4,7 +4,7 @@ Allow rdoc from ruby 2.5 to match requirement
+++ b/Gemfile
@@ -143,7 +143,7 @@
gem 'commonmarker', '~> 0.20'
gem 'kramdown', '~> 2.2', '>= 2.2.1'
gem 'kramdown', '~> 2.3'
gem 'RedCloth', '~> 4.3', '>= 4.3.2'
-gem 'rdoc', '~> 6.1', '>= 6.1.2'
+gem 'rdoc', '~> 6.0'


@ -11,4 +11,4 @@ maintaining two almost same packages.
-gem 'gitlab-markup', '~> 1.7', '>= 1.7.1'
gem 'github-markup', '~> 1.7', require: 'github/markup'
gem 'commonmarker', '~> 0.20'
gem 'kramdown', '~> 2.2', '>= 2.2.1'
gem 'kramdown', '~> 2.3'

debian/patches/0487-relax-browser.patch (new file)

@ -0,0 +1,41 @@
From bf53d8ef7eb5dfdfc825267c5bb2ec8c0e17948f Mon Sep 17 00:00:00 2001
From: Pirate Praveen <praveen@debian.org>
Date: Mon, 13 Jul 2020 10:41:30 +0000
Subject: [PATCH] Update browser rubygem 2.5 to 4.2
---
Gemfile | 2 +-
Gemfile.lock | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
--- a/Gemfile
+++ b/Gemfile
@@ -66,7 +66,7 @@
gem 'acme-client', '~> 2.0', '>= 2.0.6'
# Browser detection
-gem 'browser', '~> 2.5'
+gem 'browser', '~> 4.2'
# GPG
gem 'gpgme', '~> 2.0', '>= 2.0.19'
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -125,7 +125,7 @@
actionpack (>= 5.0)
activemodel (>= 5.0)
brakeman (4.2.1)
- browser (2.5.3)
+ browser (4.2.0)
builder (3.2.4)
bullet (6.0.2)
activesupport (>= 3.0.0)
@@ -1195,7 +1195,7 @@
bootsnap (~> 1.4.6)
bootstrap_form (~> 4.2.0)
brakeman (~> 4.2)
- browser (~> 2.5)
+ browser (~> 4.2)
bullet (~> 6.0.2)
bundler-audit (~> 0.6.1)
capybara (~> 3.22.0)


@ -13,6 +13,7 @@
0484-relax-grape-entity.patch
0485-relax-gitlab-sidekiq-fetcher.patch
0486-relax-sidekiq.patch
0487-relax-browser.patch
0500-set-webpack-root.patch
0510-remove-dev-dependencies.patch
0520-add-system-lib-path-for-webpack.patch


@ -56,6 +56,13 @@ If you have other target branches, include them in your regex. (See [Enabling pu
The default branch also defaults to being a [protected branch](../user/project/protected_branches.md),
which already limits users from pushing directly.
#### Default restricted branch names
> Introduced in GitLab 12.10.
By default, GitLab restricts certain formats of branch names for security purposes.
Currently 40-character hexadecimal names, similar to Git commit hashes, are prohibited.
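For reference, the server-side check added in this commit (in `lib/gitlab/checks/branch_check.rb`) reduces to a single anchored pattern; the branch names below come from the accompanying spec:

```ruby
'267208abfe40e546f5e847444276f7d43a39503e'.match?(/\A\h{40}\z/)     # => true, push rejected
'fix-267208abfe40e546f5e847444276f7d43a39503e'.match?(/\A\h{40}\z/) # => false, allowed
```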
### Custom Push Rules **(CORE ONLY)**
It's possible to create custom push rules rather than the push rules available in


@ -0,0 +1,28 @@
---
type: reference, howto
---
# Project Import Decompressed Archive Size Limits
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/31564) in GitLab 13.2.
When using [Project Import](../user/project/settings/import_export.md), the size of the decompressed project archive is limited to 10 GB.
If the decompressed size exceeds this limit, a `Decompressed archive size validation failed` error is returned.
## Enable/disable size validation
Decompressed size validation is enabled by default.
If you have a project whose decompressed size exceeds this limit,
you can disable the validation by turning off the
`validate_import_decompressed_archive_size` feature flag.
Start a [Rails console](../administration/troubleshooting/debug.md#starting-a-rails-console-session).
```ruby
# Disable
Feature.disable(:validate_import_decompressed_archive_size)
# Enable
Feature.enable(:validate_import_decompressed_archive_size)
```


@ -24,7 +24,7 @@ file path fragments to start seeing results.
## Syntax highlighting
> Support for `.gitlab.ci.yml` validation [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/218472) in GitLab 13.2.
> Support for `.gitlab-ci.yml` validation [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/218472) in GitLab 13.2.
As expected from an IDE, syntax highlighting for many languages within
the Web IDE will make your direct editing even easier.


@ -119,3 +119,5 @@ module Banzai
end
end
end
Banzai::Filter::LabelReferenceFilter.prepend_if_ee('EE::Banzai::Filter::LabelReferenceFilter')


@ -55,7 +55,6 @@ module Banzai
attributes[:reference_type] ||= self.class.reference_type
attributes[:container] ||= 'body'
attributes[:placement] ||= 'top'
attributes[:html] ||= 'true'
attributes.delete(:original) if context[:no_original_data]
attributes.map do |key, value|
%Q(data-#{key.to_s.dasherize}="#{escape_once(value)}")


@ -5,6 +5,8 @@
module Gitlab
class BaseDoorkeeperController < ActionController::Base
include Gitlab::Allowable
include EnforcesTwoFactorAuthentication
helper_method :can?
end
end


@ -12,7 +12,8 @@ module Gitlab
push_protected_branch: 'You are not allowed to push code to protected branches on this project.',
create_protected_branch: 'You are not allowed to create protected branches on this project.',
invalid_commit_create_protected_branch: 'You can only use an existing protected branch ref as the basis of a new protected branch.',
non_web_create_protected_branch: 'You can only create protected branches using the web interface and API.'
non_web_create_protected_branch: 'You can only create protected branches using the web interface and API.',
prohibited_hex_branch_name: 'You cannot create a branch with a 40-character hexadecimal branch name.'
}.freeze
LOG_MESSAGES = {
@ -32,11 +33,20 @@ module Gitlab
end
end
prohibited_branch_checks
protected_branch_checks
end
private
def prohibited_branch_checks
return unless Feature.enabled?(:prohibit_hexadecimal_branch_names, project, default_enabled: true)
if branch_name =~ /\A\h{40}\z/
raise GitAccess::ForbiddenError, ERROR_MESSAGES[:prohibited_hex_branch_name]
end
end
def protected_branch_checks
logger.log_timed(LOG_MESSAGES[:protected_branch_checks]) do
return unless ProtectedBranch.protected?(project, branch_name) # rubocop:disable Cop/AvoidReturnFromBlocks


@ -0,0 +1,90 @@
# frozen_string_literal: true
require 'zlib'
module Gitlab
module ImportExport
class DecompressedArchiveSizeValidator
include Gitlab::Utils::StrongMemoize
DEFAULT_MAX_BYTES = 10.gigabytes.freeze
CHUNK_SIZE = 4096.freeze
attr_reader :error
def initialize(archive_path:, max_bytes: self.class.max_bytes)
@archive_path = archive_path
@max_bytes = max_bytes
@bytes_read = 0
@total_reads = 0
@denominator = 5
@error = nil
end
def valid?
strong_memoize(:valid) do
validate
end
end
def self.max_bytes
DEFAULT_MAX_BYTES
end
def archive_file
@archive_file ||= File.open(@archive_path)
end
private
def validate
until archive_file.eof?
compressed_chunk = archive_file.read(CHUNK_SIZE)
inflate_stream.inflate(compressed_chunk) do |chunk|
@bytes_read += chunk.size
@total_reads += 1
end
# Start garbage collection every 5 reads in order
# to prevent memory bloat during archive decompression
GC.start if gc_start?
if @bytes_read > @max_bytes
@error = error_message
return false
end
end
true
rescue => e
@error = error_message
Gitlab::ErrorTracking.track_exception(e)
Gitlab::Import::Logger.info(
message: @error,
error: e.message
)
false
ensure
inflate_stream.close
archive_file.close
end
def inflate_stream
@inflate_stream ||= Zlib::Inflate.new(Zlib::MAX_WBITS + 32)
end
def gc_start?
@total_reads % @denominator == 0
end
def error_message
_('Decompressed archive size validation failed.')
end
end
end
end
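A hedged usage sketch of the new validator, mirroring how `FileImporter` wires it up further down (the archive path is illustrative):

```ruby
validator = Gitlab::ImportExport::DecompressedArchiveSizeValidator.new(
  archive_path: '/tmp/project_export.tar.gz', # illustrative path
  max_bytes: 10.gigabytes                     # the DEFAULT_MAX_BYTES limit
)

# #valid? streams the archive through Zlib in 4 KB chunks and stops once the
# decompressed byte count crosses max_bytes; #error then holds the failure message.
raise validator.error unless validator.valid?
```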


@ -28,6 +28,7 @@ module Gitlab
copy_archive
wait_for_archived_file do
validate_decompressed_archive_size if Feature.enabled?(:validate_import_decompressed_archive_size, default_enabled: true)
decompress_archive
end
rescue => e
@ -82,6 +83,14 @@ module Gitlab
def extracted_files
Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| IGNORED_FILENAMES.include?(File.basename(f)) }
end
def validate_decompressed_archive_size
raise ImporterError.new(size_validator.error) unless size_validator.valid?
end
def size_validator
@size_validator ||= DecompressedArchiveSizeValidator.new(archive_path: @archive_file)
end
end
end
end


@ -3,7 +3,7 @@
module Gitlab
module MarkdownCache
# Increment this number every time the renderer changes its output
CACHE_COMMONMARK_VERSION = 23
CACHE_COMMONMARK_VERSION = 24
CACHE_COMMONMARK_VERSION_START = 10
BaseError = Class.new(StandardError)


@ -7495,6 +7495,9 @@ msgstr ""
msgid "Decline and sign out"
msgstr ""
msgid "Decompressed archive size validation failed."
msgstr ""
msgid "Default Branch"
msgstr ""


@ -19,12 +19,29 @@ RSpec.describe Oauth::ApplicationsController do
it { is_expected.to redirect_to(new_user_session_path) }
end
shared_examples 'redirects to 2fa setup page when the user requires it' do
context 'when 2fa is set up on application level' do
before do
stub_application_setting(require_two_factor_authentication: true)
end
it { is_expected.to redirect_to(profile_two_factor_auth_path) }
end
context 'when 2fa is set up on group level' do
let(:user) { create(:user, require_two_factor_authentication_from_group: true) }
it { is_expected.to redirect_to(profile_two_factor_auth_path) }
end
end
describe 'GET #new' do
subject { get :new }
it { is_expected.to have_gitlab_http_status(:ok) }
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
end
describe 'DELETE #destroy' do
@ -33,6 +50,7 @@ RSpec.describe Oauth::ApplicationsController do
it { is_expected.to redirect_to(oauth_applications_url) }
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
end
describe 'GET #edit' do
@ -41,6 +59,7 @@ RSpec.describe Oauth::ApplicationsController do
it { is_expected.to have_gitlab_http_status(:ok) }
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
end
describe 'PUT #update' do
@ -49,6 +68,7 @@ RSpec.describe Oauth::ApplicationsController do
it { is_expected.to redirect_to(oauth_application_url(application)) }
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
end
describe 'GET #show' do
@ -57,6 +77,7 @@ RSpec.describe Oauth::ApplicationsController do
it { is_expected.to have_gitlab_http_status(:ok) }
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
end
describe 'GET #index' do
@ -73,6 +94,7 @@ RSpec.describe Oauth::ApplicationsController do
end
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
end
describe 'POST #create' do
@ -112,6 +134,7 @@ RSpec.describe Oauth::ApplicationsController do
end
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
end
end
@ -119,6 +142,10 @@ RSpec.describe Oauth::ApplicationsController do
it 'current_user_mode available' do
expect(subject.current_user_mode).not_to be_nil
end
it 'includes Two-factor enforcement concern' do
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
end
end
describe 'locale' do


@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Oauth::AuthorizationsController do
let(:user) { create(:user, confirmed_at: confirmed_at) }
let(:confirmed_at) { 1.hour.ago }
let!(:application) { create(:oauth_application, scopes: 'api read_user', redirect_uri: 'http://example.com') }
let(:params) do
{
@ -17,9 +19,45 @@ RSpec.describe Oauth::AuthorizationsController do
sign_in(user)
end
describe 'GET #new' do
shared_examples 'OAuth Authorizations require confirmed user' do
context 'when the user is confirmed' do
let(:user) { create(:user) }
context 'when there is already an access token for the application with a matching scope' do
before do
scopes = Doorkeeper::OAuth::Scopes.from_string('api')
allow(Doorkeeper.configuration).to receive(:scopes).and_return(scopes)
create(:oauth_access_token, application: application, resource_owner_id: user.id, scopes: scopes)
end
it 'authorizes the request and redirects' do
subject
expect(request.session['user_return_to']).to be_nil
expect(response).to have_gitlab_http_status(:found)
end
end
end
context 'when the user is unconfirmed' do
let(:confirmed_at) { nil }
it 'returns 200 and renders error view' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('doorkeeper/authorizations/error')
end
end
end
describe 'GET #new' do
subject { get :new, params: params }
include_examples 'OAuth Authorizations require confirmed user'
context 'when the user is confirmed' do
let(:confirmed_at) { 1.hour.ago }
context 'without valid params' do
it 'returns 200 code and renders error view' do
@ -34,7 +72,7 @@ RSpec.describe Oauth::AuthorizationsController do
render_views
it 'returns 200 code and renders view' do
get :new, params: params
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('doorkeeper/authorizations/new')
@ -44,42 +82,28 @@ RSpec.describe Oauth::AuthorizationsController do
application.update(trusted: true)
request.session['user_return_to'] = 'http://example.com'
get :new, params: params
subject
expect(request.session['user_return_to']).to be_nil
expect(response).to have_gitlab_http_status(:found)
end
context 'when there is already an access token for the application' do
context 'when the request scope matches any of the created token scopes' do
before do
scopes = Doorkeeper::OAuth::Scopes.from_string('api')
allow(Doorkeeper.configuration).to receive(:scopes).and_return(scopes)
create :oauth_access_token, application: application, resource_owner_id: user.id, scopes: scopes
end
it 'authorizes the request and redirects' do
get :new, params: params
expect(request.session['user_return_to']).to be_nil
expect(response).to have_gitlab_http_status(:found)
end
end
end
end
end
context 'when the user is unconfirmed' do
let(:user) { create(:user, confirmed_at: nil) }
it 'returns 200 and renders error view' do
get :new, params: params
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('doorkeeper/authorizations/error')
end
end
end
describe 'POST #create' do
subject { post :create, params: params }
include_examples 'OAuth Authorizations require confirmed user'
end
describe 'DELETE #destroy' do
subject { delete :destroy, params: params }
include_examples 'OAuth Authorizations require confirmed user'
end
it 'includes Two-factor enforcement concern' do
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
end
end


@ -18,4 +18,24 @@ RSpec.describe Oauth::AuthorizedApplicationsController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
describe 'DELETE #destroy' do
let(:application) { create(:oauth_application) }
let!(:grant) { create(:oauth_access_grant, resource_owner_id: user.id, application: application) }
let!(:access_token) { create(:oauth_access_token, resource_owner: user, application: application) }
it 'revokes both access grants and tokens' do
expect(grant).not_to be_revoked
expect(access_token).not_to be_revoked
delete :destroy, params: { id: application.id }
expect(grant.reload).to be_revoked
expect(access_token.reload).to be_revoked
end
end
it 'includes Two-factor enforcement concern' do
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
end
end


@ -68,4 +68,8 @@ RSpec.describe Oauth::TokenInfoController do
end
end
end
it 'includes Two-factor enforcement concern' do
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
end
end


@ -0,0 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Oauth::TokensController do
it 'includes Two-factor enforcement concern' do
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
end
end


@ -101,6 +101,16 @@ FactoryBot.define do
trait :prometheus do
monitoring_tool { Gitlab::AlertManagement::AlertParams::MONITORING_TOOLS[:prometheus] }
payload do
{
annotations: {
title: 'This is a prometheus error',
summary: 'Summary of the error',
description: 'Description of the error'
},
startsAt: started_at
}.with_indifferent_access
end
end
trait :all_fields do


@ -551,7 +551,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'shows deployment message' do
expect(page).to have_content 'This job is deployed to production'
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
end
context 'when there is a cluster used for the deployment' do
@ -583,7 +583,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'shows a link for the job' do
expect(page).to have_link environment.name
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
end
end
@ -593,7 +593,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'shows a link to latest deployment' do
expect(page).to have_link environment.name
expect(page).to have_content 'This job is creating a deployment'
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
end
end
end
@ -645,15 +645,15 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it 'renders a link to the most recent deployment' do
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
expect(find('.js-job-deployment-link')['href']).to include(second_deployment.deployable.project.path, second_deployment.deployable_id.to_s)
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
expect(find('[data-testid="job-deployment-link"]')['href']).to include(second_deployment.deployable.project.path, second_deployment.deployable_id.to_s)
end
context 'when deployment does not have a deployable' do
let!(:second_deployment) { create(:deployment, :success, environment: environment, deployable: nil) }
it 'has an empty href' do
expect(find('.js-job-deployment-link')['href']).to be_empty
expect(find('[data-testid="job-deployment-link"]')['href']).to be_empty
end
end
end
@ -679,7 +679,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
expected_text = 'This job is creating a deployment to staging'
expect(page).to have_css('.environment-information', text: expected_text)
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
end
context 'when it has deployment' do
@ -690,7 +690,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
expect(page).to have_css('.environment-information', text: expected_text)
expect(page).to have_css('.environment-information', text: 'latest deployment')
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
end
end
end
@ -705,7 +705,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
'.environment-information', text: expected_text)
expect(page).not_to have_css(
'.environment-information', text: 'latest deployment')
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
end
end
end


@ -1,14 +1,13 @@
import Vue from 'vue';
import component from '~/jobs/components/environments_block.vue';
import mountComponent from '../../helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import EnvironmentsBlock from '~/jobs/components/environments_block.vue';
const TEST_CLUSTER_NAME = 'test_cluster';
const TEST_CLUSTER_PATH = 'path/to/test_cluster';
const TEST_KUBERNETES_NAMESPACE = 'this-is-a-kubernetes-namespace';
describe('Environments block', () => {
const Component = Vue.extend(component);
let vm;
let wrapper;
const status = {
group: 'success',
icon: 'status_success',
@ -38,20 +37,23 @@ describe('Environments block', () => {
});
const createComponent = (deploymentStatus = {}, deploymentCluster = {}) => {
vm = mountComponent(Component, {
deploymentStatus,
deploymentCluster,
iconStatus: status,
wrapper = mount(EnvironmentsBlock, {
propsData: {
deploymentStatus,
deploymentCluster,
iconStatus: status,
},
});
};
const findText = () => vm.$el.textContent.trim();
const findJobDeploymentLink = () => vm.$el.querySelector('.js-job-deployment-link');
const findEnvironmentLink = () => vm.$el.querySelector('.js-environment-link');
const findClusterLink = () => vm.$el.querySelector('.js-job-cluster-link');
const findText = () => wrapper.find(EnvironmentsBlock).text();
const findJobDeploymentLink = () => wrapper.find('[data-testid="job-deployment-link"]');
const findEnvironmentLink = () => wrapper.find('[data-testid="job-environment-link"]');
const findClusterLink = () => wrapper.find('[data-testid="job-cluster-link"]');
afterEach(() => {
vm.$destroy();
wrapper.destroy();
wrapper = null;
});
describe('with last deployment', () => {
@ -61,7 +63,7 @@ describe('Environments block', () => {
environment,
});
expect(findText()).toEqual('This job is deployed to environment.');
expect(findText()).toBe('This job is deployed to environment.');
});
describe('when there is a cluster', () => {
@ -74,7 +76,7 @@ describe('Environments block', () => {
createDeploymentWithCluster(),
);
expect(findText()).toEqual(
expect(findText()).toBe(
`This job is deployed to environment using cluster ${TEST_CLUSTER_NAME}.`,
);
});
@ -89,7 +91,7 @@ describe('Environments block', () => {
createDeploymentWithClusterAndKubernetesNamespace(),
);
expect(findText()).toEqual(
expect(findText()).toBe(
`This job is deployed to environment using cluster ${TEST_CLUSTER_NAME} and namespace ${TEST_KUBERNETES_NAMESPACE}.`,
);
});
@ -105,11 +107,11 @@ describe('Environments block', () => {
environment: createEnvironmentWithLastDeployment(),
});
expect(findText()).toEqual(
expect(findText()).toBe(
'This job is an out-of-date deployment to environment. View the most recent deployment.',
);
expect(findJobDeploymentLink().getAttribute('href')).toEqual('bar');
expect(findJobDeploymentLink().attributes('href')).toBe('bar');
});
describe('when there is a cluster', () => {
@ -122,7 +124,7 @@ describe('Environments block', () => {
createDeploymentWithCluster(),
);
expect(findText()).toEqual(
expect(findText()).toBe(
`This job is an out-of-date deployment to environment using cluster ${TEST_CLUSTER_NAME}. View the most recent deployment.`,
);
});
@ -137,7 +139,7 @@ describe('Environments block', () => {
createDeploymentWithClusterAndKubernetesNamespace(),
);
expect(findText()).toEqual(
expect(findText()).toBe(
`This job is an out-of-date deployment to environment using cluster ${TEST_CLUSTER_NAME} and namespace ${TEST_KUBERNETES_NAMESPACE}. View the most recent deployment.`,
);
});
@ -152,7 +154,7 @@ describe('Environments block', () => {
environment,
});
expect(findText()).toEqual('This job is an out-of-date deployment to environment.');
expect(findText()).toBe('This job is an out-of-date deployment to environment.');
});
});
});
@ -164,7 +166,7 @@ describe('Environments block', () => {
environment,
});
expect(findText()).toEqual('The deployment of this job to environment did not succeed.');
expect(findText()).toBe('The deployment of this job to environment did not succeed.');
});
});
@ -176,13 +178,15 @@ describe('Environments block', () => {
environment: createEnvironmentWithLastDeployment(),
});
expect(findText()).toEqual(
expect(findText()).toBe(
'This job is creating a deployment to environment. This will overwrite the latest deployment.',
);
expect(findJobDeploymentLink().getAttribute('href')).toEqual('bar');
expect(findEnvironmentLink().getAttribute('href')).toEqual(environment.environment_path);
expect(findClusterLink()).toBeNull();
expect(findEnvironmentLink().attributes('href')).toBe(environment.environment_path);
expect(findJobDeploymentLink().attributes('href')).toBe('bar');
expect(findClusterLink().exists()).toBe(false);
});
});
@ -193,7 +197,7 @@ describe('Environments block', () => {
environment,
});
expect(findText()).toEqual('This job is creating a deployment to environment.');
expect(findText()).toBe('This job is creating a deployment to environment.');
});
describe('when there is a cluster', () => {
@ -206,7 +210,7 @@ describe('Environments block', () => {
createDeploymentWithCluster(),
);
expect(findText()).toEqual(
expect(findText()).toBe(
`This job is creating a deployment to environment using cluster ${TEST_CLUSTER_NAME}.`,
);
});
@ -220,7 +224,7 @@ describe('Environments block', () => {
environment: null,
});
expect(findEnvironmentLink()).toBeNull();
expect(findEnvironmentLink().exists()).toBe(false);
});
});
});
@ -235,11 +239,11 @@ describe('Environments block', () => {
createDeploymentWithCluster(),
);
expect(findText()).toEqual(
expect(findText()).toBe(
`This job is deployed to environment using cluster ${TEST_CLUSTER_NAME}.`,
);
expect(findClusterLink().getAttribute('href')).toEqual(TEST_CLUSTER_PATH);
expect(findClusterLink().attributes('href')).toBe(TEST_CLUSTER_PATH);
});
describe('when the cluster is missing the path', () => {
@ -254,7 +258,7 @@ describe('Environments block', () => {
expect(findText()).toContain('using cluster the-cluster.');
expect(findClusterLink()).toBeNull();
expect(findClusterLink().exists()).toBe(false);
});
});
});


@ -327,4 +327,12 @@ RSpec.describe IssuablesHelper do
end
end
end
describe '#sidebar_milestone_tooltip_label' do
it 'escapes HTML in the milestone title' do
milestone = build(:milestone, title: '<img onerror=alert(1)>')
expect(helper.sidebar_milestone_tooltip_label(milestone)).to eq('&lt;img onerror=alert(1)&gt;<br/>Milestone')
end
end
end


@ -75,6 +75,12 @@ RSpec.describe Banzai::Filter::IssueReferenceFilter do
expect(doc.text).to eq "Issue #{reference}"
end
it 'renders non-HTML tooltips' do
doc = reference_filter("Issue #{reference}")
expect(doc.at_css('a')).not_to have_attribute('data-html')
end
it 'includes default classes' do
doc = reference_filter("Issue #{reference}")
expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'


@ -19,6 +19,29 @@ RSpec.describe Gitlab::Checks::BranchCheck do
end
end
context "prohibited branches check" do
it "prohibits 40-character hexadecimal branch names" do
allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e")
expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.")
end
it "doesn't prohibit a nested hexadecimal in a branch name" do
allow(subject).to receive(:branch_name).and_return("fix-267208abfe40e546f5e847444276f7d43a39503e")
expect { subject.validate! }.not_to raise_error
end
context "the feature flag is disabled" do
it "doesn't prohibit a 40-character hexadecimal branch name" do
stub_feature_flags(prohibit_hexadecimal_branch_names: false)
allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e")
expect { subject.validate! }.not_to raise_error
end
end
end
context 'protected branches check' do
before do
allow(ProtectedBranch).to receive(:protected?).with(project, 'master').and_return(true)


@ -0,0 +1,58 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
let_it_be(:filepath) { File.join(Dir.tmpdir, 'decompressed_archive_size_validator_spec.gz') }
before(:all) do
create_compressed_file
end
after(:all) do
FileUtils.rm(filepath)
end
subject { described_class.new(archive_path: filepath, max_bytes: max_bytes) }
describe '#valid?' do
let(:max_bytes) { 1 }
context 'when file does not exceed allowed decompressed size' do
let(:max_bytes) { 20 }
it 'returns true' do
expect(subject.valid?).to eq(true)
end
end
context 'when file exceeds allowed decompressed size' do
it 'returns false' do
expect(subject.valid?).to eq(false)
end
end
context 'when something goes wrong during decompression' do
before do
allow(subject.archive_file).to receive(:eof?).and_raise(StandardError)
end
it 'logs and tracks raised exception' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(instance_of(StandardError))
expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(message: 'Decompressed archive size validation failed.'))
subject.valid?
end
it 'returns false' do
expect(subject.valid?).to eq(false)
end
end
end
def create_compressed_file
Zlib::GzipWriter.open(filepath) do |gz|
gz.write('Hello World!')
end
end
end


@ -98,6 +98,45 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
end
end
context 'when file exceeds acceptable decompressed size' do
let(:project) { create(:project) }
let(:shared) { Gitlab::ImportExport::Shared.new(project) }
let(:filepath) { File.join(Dir.tmpdir, 'file_importer_spec.gz') }
subject { described_class.new(importable: project, archive_file: filepath, shared: shared) }
before do
Zlib::GzipWriter.open(filepath) do |gz|
gz.write('Hello World!')
end
end
context 'when validate_import_decompressed_archive_size feature flag is enabled' do
before do
stub_feature_flags(validate_import_decompressed_archive_size: true)
allow(Gitlab::ImportExport::DecompressedArchiveSizeValidator).to receive(:max_bytes).and_return(1)
end
it 'returns false' do
expect(subject.import).to eq(false)
expect(shared.errors.join).to eq('Decompressed archive size validation failed.')
end
end
context 'when validate_import_decompressed_archive_size feature flag is disabled' do
before do
stub_feature_flags(validate_import_decompressed_archive_size: false)
end
it 'skips validation' do
expect(subject).to receive(:validate_decompressed_archive_size).never
subject.import
end
end
end
def setup_files
FileUtils.mkdir_p("#{shared.export_path}/subfolder/")
FileUtils.touch(valid_file)


@ -45,6 +45,21 @@ RSpec.describe Notify do
end
end
shared_examples 'it requires a group' do
context 'when given a deleted group' do
before do
# destroy group and group member
group_member.destroy!
group.destroy!
end
it 'returns NullMail type message' do
expect(Gitlab::AppLogger).to receive(:info)
expect(subject.message).to be_a(ActionMailer::Base::NullMail)
end
end
end
context 'for a project' do
shared_examples 'an assignee email' do
let(:recipient) { assignee }
@ -1388,6 +1403,7 @@ RSpec.describe Notify do
it_behaves_like "a user cannot unsubscribe through footer link"
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
it_behaves_like 'it requires a group'
it 'contains all the useful information' do
is_expected.to have_subject "Access to the #{group.name} group was granted"
@ -1422,6 +1438,7 @@ RSpec.describe Notify do
it_behaves_like "a user cannot unsubscribe through footer link"
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
it_behaves_like 'it requires a group'
it 'contains all the useful information' do
is_expected.to have_subject "Invitation to join the #{group.name} group"
@ -1448,6 +1465,7 @@ RSpec.describe Notify do
it_behaves_like "a user cannot unsubscribe through footer link"
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
it_behaves_like 'it requires a group'
it 'contains all the useful information' do
is_expected.to have_subject 'Invitation accepted'


@ -83,6 +83,15 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when alert does not exist' do
context 'when alert can be created' do
it_behaves_like 'creates an alert management alert'
it 'processes the incident alert' do
expect(IncidentManagement::ProcessAlertWorker)
.to receive(:perform_async)
.with(nil, nil, kind_of(Integer))
.once
expect(subject).to be_success
end
end
context 'when alert cannot be created' do
@ -102,6 +111,13 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
execute
end
it 'does not create incident issue' do
expect(IncidentManagement::ProcessAlertWorker)
.not_to receive(:perform_async)
expect(subject).to be_success
end
end
it { is_expected.to be_success }


@ -13,8 +13,9 @@ RSpec.describe AuthorizedProjectUpdate::ProjectGroupLinkCreateService do
let_it_be(:project) { create(:project, :private, group: create(:group, :private)) }
let(:access_level) { Gitlab::Access::MAINTAINER }
let(:group_access) { nil }
subject(:service) { described_class.new(project, group) }
subject(:service) { described_class.new(project, group, group_access) }
describe '#perform' do
context 'direct group members' do
@ -54,6 +55,26 @@ RSpec.describe AuthorizedProjectUpdate::ProjectGroupLinkCreateService do
end
end
context 'with group_access' do
let(:group_access) { Gitlab::Access::REPORTER }
before do
create(:group_member, access_level: access_level, group: group_parent, user: parent_group_user)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: parent_group_user.id,
access_level: group_access)
expect(project_authorization).to exist
end
end
context 'membership overrides' do
before do
create(:group_member, access_level: Gitlab::Access::REPORTER, group: group_parent, user: group_user)


@ -346,44 +346,117 @@ RSpec.describe Groups::TransferService do
end
context 'when transferring a group with nested groups and projects' do
let!(:group) { create(:group, :public) }
let(:subgroup1) { create(:group, :private, parent: group) }
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:nested_subgroup) { create(:group, :private, parent: subgroup1) }
let!(:nested_project) { create(:project, :repository, :private, namespace: subgroup1) }
before do
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
transfer_service.execute(new_parent_group)
end
it 'updates subgroups path' do
new_base_path = "#{new_parent_group.path}/#{group.path}"
group.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
context 'updated paths' do
let(:group) { create(:group, :public) }
before do
transfer_service.execute(new_parent_group)
end
new_base_path = "#{new_parent_group.path}/#{group.path}/#{subgroup1.path}"
subgroup1.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
it 'updates subgroups path' do
new_base_path = "#{new_parent_group.path}/#{group.path}"
group.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
end
new_base_path = "#{new_parent_group.path}/#{group.path}/#{subgroup1.path}"
subgroup1.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
end
end
it 'updates projects path' do
new_parent_path = "#{new_parent_group.path}/#{group.path}"
subgroup1.projects.each do |project|
project_full_path = "#{new_parent_path}/#{project.namespace.path}/#{project.name}"
expect(project.full_path).to eq(project_full_path)
end
end
it 'creates redirect for the subgroups and projects' do
expect(group.redirect_routes.count).to eq(1)
expect(project1.redirect_routes.count).to eq(1)
expect(subgroup1.redirect_routes.count).to eq(1)
expect(nested_subgroup.redirect_routes.count).to eq(1)
expect(nested_project.redirect_routes.count).to eq(1)
end
end
it 'updates projects path' do
new_parent_path = "#{new_parent_group.path}/#{group.path}"
subgroup1.projects.each do |project|
project_full_path = "#{new_parent_path}/#{project.namespace.path}/#{project.name}"
expect(project.full_path).to eq(project_full_path)
end
end
context 'resets project authorizations' do
let(:old_parent_group) { create(:group) }
let(:group) { create(:group, :private, parent: old_parent_group) }
let(:new_group_member) { create(:user) }
let(:old_group_member) { create(:user) }
it 'creates redirect for the subgroups and projects' do
expect(group.redirect_routes.count).to eq(1)
expect(project1.redirect_routes.count).to eq(1)
expect(subgroup1.redirect_routes.count).to eq(1)
expect(nested_subgroup.redirect_routes.count).to eq(1)
expect(nested_project.redirect_routes.count).to eq(1)
before do
new_parent_group.add_maintainer(new_group_member)
old_parent_group.add_maintainer(old_group_member)
group.refresh_members_authorized_projects
end
it 'removes old project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project1.id, user_id: old_group_member.id).size
}.from(1).to(0)
end
it 'adds new project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project1.id, user_id: new_group_member.id).size
}.from(0).to(1)
end
it 'performs authorizations job immediately' do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_inline)
transfer_service.execute(new_parent_group)
end
context 'for nested projects' do
it 'removes old project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: old_group_member.id).size
}.from(1).to(0)
end
it 'adds new project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: new_group_member.id).size
}.from(0).to(1)
end
end
context 'for groups with many members' do
before do
11.times do
new_parent_group.add_maintainer(create(:user))
end
end
it 'adds new project authorizations for the user which makes a transfer' do
transfer_service.execute(new_parent_group)
expect(ProjectAuthorization.where(project_id: project1.id, user_id: user.id).size).to eq(1)
expect(ProjectAuthorization.where(project_id: nested_project.id, user_id: user.id).size).to eq(1)
end
it 'schedules authorizations job' do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async)
.with(array_including(new_parent_group.members_with_parents.pluck(:user_id).map {|id| [id, anything] }))
transfer_service.execute(new_parent_group)
end
end
end
end
@@ -6,9 +6,10 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
let_it_be(:project) { create :project }
let(:group_access) { Gitlab::Access::DEVELOPER }
let(:opts) do
{
link_group_access: '30',
link_group_access: group_access,
expires_at: nil
}
end
@@ -49,7 +50,9 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
receive(:bulk_perform_async)
)
expect(AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker).to(
receive(:perform_async).and_call_original
receive(:perform_async)
.with(project.id, group.id, group_access)
.and_call_original
)
expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
receive(:bulk_perform_in)
@@ -16,16 +16,62 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
subject { described_class.new.perform(nil, nil, alert.id) }
before do
allow(Gitlab::AppLogger).to receive(:warn).and_call_original
allow(IncidentManagement::CreateIssueService)
.to receive(:new).with(alert.project, parsed_payload)
.and_call_original
end
it 'creates an issue' do
expect(IncidentManagement::CreateIssueService)
.to receive(:new).with(alert.project, parsed_payload)
shared_examples 'creates issue successfully' do
it 'creates an issue' do
expect(IncidentManagement::CreateIssueService)
.to receive(:new).with(alert.project, parsed_payload)
expect { subject }.to change { Issue.count }.by(1)
expect { subject }.to change { Issue.count }.by(1)
end
it 'updates AlertManagement::Alert#issue_id' do
subject
expect(alert.reload.issue_id).to eq(created_issue.id)
end
it 'does not write a warning to log' do
subject
expect(Gitlab::AppLogger).not_to have_received(:warn)
end
end
context 'with valid alert' do
it_behaves_like 'creates issue successfully'
context 'when alert cannot be updated' do
let_it_be(:alert) { create(:alert_management_alert, :with_validation_errors, project: project, payload: payload) }
it 'updates AlertManagement::Alert#issue_id' do
expect { subject }.not_to change { alert.reload.issue_id }
end
it 'logs a warning' do
subject
expect(Gitlab::AppLogger).to have_received(:warn).with(
message: 'Cannot link an Issue with Alert',
issue_id: created_issue.id,
alert_id: alert.id,
alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
end
end
context 'prometheus alert' do
let_it_be(:alert) { create(:alert_management_alert, :prometheus, project: project, started_at: started_at) }
let_it_be(:parsed_payload) { alert.payload }
it_behaves_like 'creates issue successfully'
end
end
context 'with invalid alert' do
@@ -39,44 +85,5 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
expect { subject }.not_to change { Issue.count }
end
end
context 'with valid alert' do
before do
allow(Gitlab::AppLogger).to receive(:warn).and_call_original
end
context 'when alert can be updated' do
it 'updates AlertManagement::Alert#issue_id' do
subject
expect(alert.reload.issue_id).to eq(created_issue.id)
end
it 'does not write a warning to log' do
subject
expect(Gitlab::AppLogger).not_to have_received(:warn)
end
context 'when alert cannot be updated' do
let_it_be(:alert) { create(:alert_management_alert, :with_validation_errors, project: project, payload: payload) }
it 'updates AlertManagement::Alert#issue_id' do
expect { subject }.not_to change { alert.reload.issue_id }
end
it 'logs a warning' do
subject
expect(Gitlab::AppLogger).to have_received(:warn).with(
message: 'Cannot link an Issue with Alert',
issue_id: created_issue.id,
alert_id: alert.id,
alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
end
end
end
end
end
end