Update upstream source from tag 'upstream/13.6.5'
Update to upstream version '13.6.5'
with Debian dir c236b94d07
commit c31bdba3c2
45 changed files with 0 additions and 2158 deletions
@@ -1,215 +0,0 @@
<script>
import { GlLineChart } from '@gitlab/ui/dist/charts';
import { GlAlert } from '@gitlab/ui';
import { mapKeys, mapValues, pick, some, sum } from 'lodash';
import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue';
import { s__ } from '~/locale';
import {
  differenceInMonths,
  formatDateAsMonth,
  getDayDifference,
} from '~/lib/utils/datetime_utility';
import { getAverageByMonth, sortByDate, extractValues } from '../utils';
import pipelineStatsQuery from '../graphql/queries/pipeline_stats.query.graphql';
import { TODAY, START_DATE } from '../constants';

const DATA_KEYS = [
  'pipelinesTotal',
  'pipelinesSucceeded',
  'pipelinesFailed',
  'pipelinesCanceled',
  'pipelinesSkipped',
];
const PREFIX = 'pipelines';

export default {
  name: 'PipelinesChart',
  components: {
    GlLineChart,
    GlAlert,
    ChartSkeletonLoader,
  },
  startDate: START_DATE,
  endDate: TODAY,
  i18n: {
    loadPipelineChartError: s__(
      'InstanceAnalytics|Could not load the pipelines chart. Please refresh the page to try again.',
    ),
    noDataMessage: s__('InstanceAnalytics|There is no data available.'),
    total: s__('InstanceAnalytics|Total'),
    succeeded: s__('InstanceAnalytics|Succeeded'),
    failed: s__('InstanceAnalytics|Failed'),
    canceled: s__('InstanceAnalytics|Canceled'),
    skipped: s__('InstanceAnalytics|Skipped'),
    chartTitle: s__('InstanceAnalytics|Pipelines'),
    yAxisTitle: s__('InstanceAnalytics|Items'),
    xAxisTitle: s__('InstanceAnalytics|Month'),
  },
  data() {
    return {
      loading: true,
      loadingError: null,
    };
  },
  apollo: {
    pipelineStats: {
      query: pipelineStatsQuery,
      variables() {
        return {
          firstTotal: this.totalDaysToShow,
          firstSucceeded: this.totalDaysToShow,
          firstFailed: this.totalDaysToShow,
          firstCanceled: this.totalDaysToShow,
          firstSkipped: this.totalDaysToShow,
        };
      },
      update(data) {
        const allData = extractValues(data, DATA_KEYS, PREFIX, 'nodes');
        const allPageInfo = extractValues(data, DATA_KEYS, PREFIX, 'pageInfo');

        return {
          ...mapValues(allData, sortByDate),
          ...allPageInfo,
        };
      },
      result() {
        if (this.hasNextPage) {
          this.fetchNextPage();
        }
      },
      error() {
        this.handleError();
      },
    },
  },
  computed: {
    isLoading() {
      return this.$apollo.queries.pipelineStats.loading;
    },
    totalDaysToShow() {
      return getDayDifference(this.$options.startDate, this.$options.endDate);
    },
    firstVariables() {
      const allData = pick(this.pipelineStats, [
        'nodesTotal',
        'nodesSucceeded',
        'nodesFailed',
        'nodesCanceled',
        'nodesSkipped',
      ]);
      const allDayDiffs = mapValues(allData, data => {
        const firstDataPoint = data[0];
        if (!firstDataPoint) {
          return 0;
        }

        return Math.max(
          0,
          getDayDifference(this.$options.startDate, new Date(firstDataPoint.recordedAt)),
        );
      });

      return mapKeys(allDayDiffs, (value, key) => key.replace('nodes', 'first'));
    },
    cursorVariables() {
      const pageInfoKeys = [
        'pageInfoTotal',
        'pageInfoSucceeded',
        'pageInfoFailed',
        'pageInfoCanceled',
        'pageInfoSkipped',
      ];

      return extractValues(this.pipelineStats, pageInfoKeys, 'pageInfo', 'endCursor');
    },
    hasNextPage() {
      return (
        sum(Object.values(this.firstVariables)) > 0 &&
        some(this.pipelineStats, ({ hasNextPage }) => hasNextPage)
      );
    },
    hasEmptyDataSet() {
      return this.chartData.every(({ data }) => data.length === 0);
    },
    chartData() {
      const allData = pick(this.pipelineStats, [
        'nodesTotal',
        'nodesSucceeded',
        'nodesFailed',
        'nodesCanceled',
        'nodesSkipped',
      ]);
      const options = { shouldRound: true };
      return Object.keys(allData).map(key => {
        const i18nName = key.slice('nodes'.length).toLowerCase();
        return {
          name: this.$options.i18n[i18nName],
          data: getAverageByMonth(allData[key], options),
        };
      });
    },
    range() {
      return {
        min: this.$options.startDate,
        max: this.$options.endDate,
      };
    },
    chartOptions() {
      const { endDate, startDate, i18n } = this.$options;
      return {
        xAxis: {
          ...this.range,
          name: i18n.xAxisTitle,
          type: 'time',
          splitNumber: differenceInMonths(startDate, endDate) + 1,
          axisLabel: {
            interval: 0,
            showMinLabel: false,
            showMaxLabel: false,
            align: 'right',
            formatter: formatDateAsMonth,
          },
        },
        yAxis: {
          name: i18n.yAxisTitle,
        },
      };
    },
  },
  methods: {
    handleError() {
      this.loadingError = true;
    },
    fetchNextPage() {
      this.$apollo.queries.pipelineStats
        .fetchMore({
          variables: {
            ...this.firstVariables,
            ...this.cursorVariables,
          },
          updateQuery: (previousResult, { fetchMoreResult }) => {
            return Object.keys(fetchMoreResult).reduce((memo, key) => {
              const { nodes, ...rest } = fetchMoreResult[key];
              const previousNodes = previousResult[key].nodes;
              return { ...memo, [key]: { ...rest, nodes: [...previousNodes, ...nodes] } };
            }, {});
          },
        })
        .catch(this.handleError);
    },
  },
};
</script>
<template>
  <div>
    <h3>{{ $options.i18n.chartTitle }}</h3>
    <gl-alert v-if="loadingError" variant="danger" :dismissible="false" class="gl-mt-3">
      {{ $options.i18n.loadPipelineChartError }}
    </gl-alert>
    <chart-skeleton-loader v-else-if="isLoading" />
    <gl-alert v-else-if="hasEmptyDataSet" variant="info" :dismissible="false" class="gl-mt-3">
      {{ $options.i18n.noDataMessage }}
    </gl-alert>
    <gl-line-chart v-else :option="chartOptions" :include-legend-avg-max="true" :data="chartData" />
  </div>
</template>
@@ -1,76 +0,0 @@
#import "~/graphql_shared/fragments/pageInfo.fragment.graphql"
#import "./count.fragment.graphql"

query pipelineStats(
  $firstTotal: Int
  $firstSucceeded: Int
  $firstFailed: Int
  $firstCanceled: Int
  $firstSkipped: Int
  $endCursorTotal: String
  $endCursorSucceeded: String
  $endCursorFailed: String
  $endCursorCanceled: String
  $endCursorSkipped: String
) {
  pipelinesTotal: instanceStatisticsMeasurements(
    identifier: PIPELINES
    first: $firstTotal
    after: $endCursorTotal
  ) {
    nodes {
      ...Count
    }
    pageInfo {
      ...PageInfo
    }
  }
  pipelinesSucceeded: instanceStatisticsMeasurements(
    identifier: PIPELINES_SUCCEEDED
    first: $firstSucceeded
    after: $endCursorSucceeded
  ) {
    nodes {
      ...Count
    }
    pageInfo {
      ...PageInfo
    }
  }
  pipelinesFailed: instanceStatisticsMeasurements(
    identifier: PIPELINES_FAILED
    first: $firstFailed
    after: $endCursorFailed
  ) {
    nodes {
      ...Count
    }
    pageInfo {
      ...PageInfo
    }
  }
  pipelinesCanceled: instanceStatisticsMeasurements(
    identifier: PIPELINES_CANCELED
    first: $firstCanceled
    after: $endCursorCanceled
  ) {
    nodes {
      ...Count
    }
    pageInfo {
      ...PageInfo
    }
  }
  pipelinesSkipped: instanceStatisticsMeasurements(
    identifier: PIPELINES_SKIPPED
    first: $firstSkipped
    after: $endCursorSkipped
  ) {
    nodes {
      ...Count
    }
    pageInfo {
      ...PageInfo
    }
  }
}
@@ -1,10 +0,0 @@
export const performanceMarkAndMeasure = ({ mark, measures = [] } = {}) => {
  window.requestAnimationFrame(() => {
    if (mark && !performance.getEntriesByName(mark).length) {
      performance.mark(mark);
    }
    measures.forEach(measure => {
      performance.measure(measure.name, measure.start, measure.end);
    });
  });
};
@@ -1,100 +0,0 @@
<script>
import { mapState } from 'vuex';
import { GlDropdown, GlDropdownItem, GlDropdownDivider } from '@gitlab/ui';
import { setUrlParams, visitUrl } from '~/lib/utils/url_utility';
import { sprintf, s__ } from '~/locale';

export default {
  name: 'DropdownFilter',
  components: {
    GlDropdown,
    GlDropdownItem,
    GlDropdownDivider,
  },
  props: {
    filterData: {
      type: Object,
      required: true,
    },
  },
  computed: {
    ...mapState(['query']),
    scope() {
      return this.query.scope;
    },
    supportedScopes() {
      return Object.values(this.filterData.scopes);
    },
    initialFilter() {
      return this.query[this.filterData.filterParam];
    },
    filter() {
      return this.initialFilter || this.filterData.filters.ANY.value;
    },
    filtersArray() {
      return this.filterData.filterByScope[this.scope];
    },
    selectedFilter: {
      get() {
        if (this.filtersArray.some(({ value }) => value === this.filter)) {
          return this.filter;
        }

        return this.filterData.filters.ANY.value;
      },
      set(filter) {
        visitUrl(setUrlParams({ [this.filterData.filterParam]: filter }));
      },
    },
    selectedFilterText() {
      const f = this.filtersArray.find(({ value }) => value === this.selectedFilter);
      if (!f || f === this.filterData.filters.ANY) {
        return sprintf(s__('Any %{header}'), { header: this.filterData.header });
      }

      return f.label;
    },
    showDropdown() {
      return this.supportedScopes.includes(this.scope);
    },
  },
  methods: {
    dropDownItemClass(filter) {
      return {
        'gl-border-b-solid gl-border-b-gray-100 gl-border-b-1 gl-pb-2! gl-mb-2':
          filter === this.filterData.filters.ANY,
      };
    },
    isFilterSelected(filter) {
      return filter === this.selectedFilter;
    },
    handleFilterChange(filter) {
      this.selectedFilter = filter;
    },
  },
};
</script>

<template>
  <gl-dropdown
    v-if="showDropdown"
    :text="selectedFilterText"
    class="col-3 gl-pt-4 gl-pl-0 gl-pr-0 gl-mr-4"
    menu-class="gl-w-full! gl-pl-0"
  >
    <header class="gl-text-center gl-font-weight-bold gl-font-lg">
      {{ filterData.header }}
    </header>
    <gl-dropdown-divider />
    <gl-dropdown-item
      v-for="f in filtersArray"
      :key="f.value"
      :is-check-item="true"
      :is-checked="isFilterSelected(f.value)"
      :class="dropDownItemClass(f)"
      @click="handleFilterChange(f.value)"
    >
      {{ f.label }}
    </gl-dropdown-item>
  </gl-dropdown>
</template>
@@ -1,36 +0,0 @@
import { __ } from '~/locale';

const header = __('Confidentiality');

const filters = {
  ANY: {
    label: __('Any'),
    value: null,
  },
  CONFIDENTIAL: {
    label: __('Confidential'),
    value: 'yes',
  },
  NOT_CONFIDENTIAL: {
    label: __('Not confidential'),
    value: 'no',
  },
};

const scopes = {
  ISSUES: 'issues',
};

const filterByScope = {
  [scopes.ISSUES]: [filters.ANY, filters.CONFIDENTIAL, filters.NOT_CONFIDENTIAL],
};

const filterParam = 'confidential';

export default {
  header,
  filters,
  scopes,
  filterByScope,
  filterParam,
};
@@ -1,42 +0,0 @@
import { __ } from '~/locale';

const header = __('Status');

const filters = {
  ANY: {
    label: __('Any'),
    value: 'all',
  },
  OPEN: {
    label: __('Open'),
    value: 'opened',
  },
  CLOSED: {
    label: __('Closed'),
    value: 'closed',
  },
  MERGED: {
    label: __('Merged'),
    value: 'merged',
  },
};

const scopes = {
  ISSUES: 'issues',
  MERGE_REQUESTS: 'merge_requests',
};

const filterByScope = {
  [scopes.ISSUES]: [filters.ANY, filters.OPEN, filters.CLOSED],
  [scopes.MERGE_REQUESTS]: [filters.ANY, filters.OPEN, filters.MERGED, filters.CLOSED],
};

const filterParam = 'state';

export default {
  header,
  filters,
  scopes,
  filterByScope,
  filterParam,
};
@@ -1,38 +0,0 @@
import Vue from 'vue';
import Translate from '~/vue_shared/translate';
import DropdownFilter from './components/dropdown_filter.vue';
import stateFilterData from './constants/state_filter_data';
import confidentialFilterData from './constants/confidential_filter_data';

Vue.use(Translate);

const mountDropdownFilter = (store, { id, filterData }) => {
  const el = document.getElementById(id);

  if (!el) return false;

  return new Vue({
    el,
    store,
    render(createElement) {
      return createElement(DropdownFilter, {
        props: {
          filterData,
        },
      });
    },
  });
};

const dropdownFilters = [
  {
    id: 'js-search-filter-by-state',
    filterData: stateFilterData,
  },
  {
    id: 'js-search-filter-by-confidential',
    filterData: confidentialFilterData,
  },
];

export default store => [...dropdownFilters].map(filter => mountDropdownFilter(store, filter));
@@ -1,96 +0,0 @@
@import 'mixins_and_variables_and_functions';

.signup-page {
  .page-wrap {
    background-color: var(--gray-10, $gray-10);
  }

  .signup-box-container {
    max-width: 960px;
  }

  .signup-box {
    background-color: var(--white, $white);
    box-shadow: 0 0 0 1px var(--border-color, $border-color);
    border-radius: $border-radius;
  }

  .form-control {
    &:active,
    &:focus {
      background-color: var(--white, $white);
    }
  }

  .devise-errors {
    h2 {
      font-size: $gl-font-size;
      color: var(--red-700, $red-700);
    }
  }

  .omniauth-divider {
    &::before,
    &::after {
      content: '';
      flex: 1;
      border-bottom: 1px solid var(--gray-100, $gray-100);
      margin: $gl-padding-24 0;
    }

    &::before {
      margin-right: $gl-padding;
    }

    &::after {
      margin-left: $gl-padding;
    }
  }

  .omniauth-btn {
    width: 48%;

    @include media-breakpoint-down(md) {
      width: 100%;
    }

    img {
      width: $default-icon-size;
      height: $default-icon-size;
    }
  }

  .decline-page {
    width: 350px;
  }
}

.signup-page[data-page^='registrations:experience_levels'] {
  $card-shadow-color: rgba(var(--black, $black), 0.2);

  .page-wrap {
    background-color: var(--white, $white);
  }

  .card-deck {
    max-width: 828px;
  }

  .card {
    transition: box-shadow 0.3s ease-in-out;
  }

  .card:hover {
    box-shadow: 0 $gl-spacing-scale-3 $gl-spacing-scale-5 $card-shadow-color;
  }

  @media (min-width: $breakpoint-sm) {
    .card-deck .card {
      margin: 0 $gl-spacing-scale-3;
    }
  }

  .stretched-link:hover {
    text-decoration: none;
  }
}
@@ -1,77 +0,0 @@
# frozen_string_literal: true

module Ci
  module BuildTraceChunks
    class LegacyFog
      def available?
        object_store.enabled
      end

      def data(model)
        connection.get_object(bucket_name, key(model))[:body]
      rescue Excon::Error::NotFound
        # If the object does not exist in the object storage, this method returns nil.
      end

      def set_data(model, new_data)
        connection.put_object(bucket_name, key(model), new_data)
      end

      def append_data(model, new_data, offset)
        if offset > 0
          truncated_data = data(model).to_s.byteslice(0, offset)
          new_data = truncated_data + new_data
        end

        set_data(model, new_data)
        new_data.bytesize
      end

      def size(model)
        data(model).to_s.bytesize
      end

      def delete_data(model)
        delete_keys([[model.build_id, model.chunk_index]])
      end

      def keys(relation)
        return [] unless available?

        relation.pluck(:build_id, :chunk_index)
      end

      def delete_keys(keys)
        keys.each do |key|
          connection.delete_object(bucket_name, key_raw(*key))
        end
      end

      private

      def key(model)
        key_raw(model.build_id, model.chunk_index)
      end

      def key_raw(build_id, chunk_index)
        "tmp/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log"
      end

      def bucket_name
        return unless available?

        object_store.remote_directory
      end

      def connection
        return unless available?

        @connection ||= ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
      end

      def object_store
        Gitlab.config.artifacts.object_store
      end
    end
  end
end
@@ -1,7 +0,0 @@
.d-lg-flex.align-items-end
  #js-search-filter-by-state{ 'v-cloak': true }
  - if Feature.enabled?(:search_filter_by_confidential, @group)
    #js-search-filter-by-confidential{ 'v-cloak': true }

- if %w(issues merge_requests).include?(@scope)
  %hr.gl-mt-4.gl-mb-4
@@ -1,7 +0,0 @@
---
name: admin_approval_for_new_user_signups
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43827
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/258980
type: development
group: group::access
default_enabled: true
@@ -1,7 +0,0 @@
---
name: ci_always_refresh_merge_requests_from_beginning
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45232
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/268215
type: development
group: group::continuous integration
default_enabled: false
@@ -1,7 +0,0 @@
---
name: ci_delete_objects_low_concurrency
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39464
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/247103
group: group::continuous integration
type: development
default_enabled: false
@@ -1,7 +0,0 @@
---
name: ci_send_deployment_hook_when_start
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41214
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/247137
group: group::progressive delivery
type: development
default_enabled: false
@@ -1,7 +0,0 @@
---
name: ci_trace_new_fog_store
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46209
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/273405
type: development
group: group::testing
default_enabled: true
@@ -1,7 +0,0 @@
---
name: deploy_boards_dedupe_instances
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40768
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/258214
type: development
group: group::progressive delivery
default_enabled: false
@@ -1,7 +0,0 @@
---
name: deployment_filters
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/44041
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/267561
type: development
group: group::source code
default_enabled: false
@@ -1,7 +0,0 @@
---
name: disable_shared_runners_on_group
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36080
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/258991
type: development
group: group::runner
default_enabled: true
@@ -1,7 +0,0 @@
---
name: expose_environment_path_in_alert_details
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43414
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/258638
type: development
group: group::progressive delivery
default_enabled: false
@@ -1,7 +0,0 @@
---
name: kubernetes_cluster_namespace_role_admin
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45479
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/270030
type: development
group: group::configure
default_enabled: false
@@ -1,7 +0,0 @@
---
name: merge_base_pipelines
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/44648
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/263724
type: development
group: group::testing
default_enabled: false
@@ -1,7 +0,0 @@
---
name: one_dimensional_matrix
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/42170
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/256062
type: development
group: group::pipeline authoring
default_enabled: true
@@ -1,7 +0,0 @@
---
name: search_filter_by_confidential
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40793
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/244923
group: group::global search
type: development
default_enabled: false
@@ -1,7 +0,0 @@
---
name: soft_fail_count_by_state
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/44184
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/263222
type: development
group: group::source code
default_enabled: false
@@ -1,7 +0,0 @@
---
name: sync_metrics_dashboards
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39658
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/241793
group: group::apm
type: development
default_enabled: false
@@ -1,7 +0,0 @@
---
name: track_unique_test_cases_parsed
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41918
rollout_issue_url:
group: group::testing
type: development
default_enabled: false
@@ -1,7 +0,0 @@
---
name: incident_sla
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43648
rollout_issue_url:
group: group::health
type: licensed
default_enabled: true
@@ -1,7 +0,0 @@
---
name: minimal_access_role
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40942
rollout_issue_url:
group: group::access
type: licensed
default_enabled: true
@@ -1,7 +0,0 @@
---
name: resource_access_token
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/29622
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/235765
group: group::access
type: licensed
default_enabled: true
@@ -1,18 +0,0 @@
# Instance Statistics

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/235754) in GitLab 13.4.

Instance Statistics gives you an overview of how much data your instance contains, and how quickly this volume is changing over time.

## Total counts

At the top of the page, Instance Statistics shows total counts for:

- Users
- Projects
- Groups
- Issues
- Merge Requests
- Pipelines

These figures can be useful for understanding how much data your instance contains in total.
@@ -1,72 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module BulkImport
    class Client
      API_VERSION = 'v4'.freeze
      DEFAULT_PAGE = 1.freeze
      DEFAULT_PER_PAGE = 30.freeze

      ConnectionError = Class.new(StandardError)

      def initialize(uri:, token:, page: DEFAULT_PAGE, per_page: DEFAULT_PER_PAGE, api_version: API_VERSION)
        @uri = URI.parse(uri)
        @token = token&.strip
        @page = page
        @per_page = per_page
        @api_version = api_version
      end

      def get(resource, query = {})
        response = with_error_handling do
          Gitlab::HTTP.get(
            resource_url(resource),
            headers: request_headers,
            follow_redirects: false,
            query: query.merge(request_query)
          )
        end

        response.parsed_response
      end

      private

      def request_query
        {
          page: @page,
          per_page: @per_page
        }
      end

      def request_headers
        {
          'Content-Type' => 'application/json',
          'Authorization' => "Bearer #{@token}"
        }
      end

      def with_error_handling
        response = yield

        raise ConnectionError.new("Error #{response.code}") unless response.success?

        response
      rescue *Gitlab::HTTP::HTTP_ERRORS => e
        raise ConnectionError, e
      end

      def base_uri
        @base_uri ||= "#{@uri.scheme}://#{@uri.host}:#{@uri.port}"
      end

      def api_url
        Gitlab::Utils.append_path(base_uri, "/api/#{@api_version}")
      end

      def resource_url(resource)
        Gitlab::Utils.append_path(api_url, resource)
      end
    end
  end
end
@@ -1,51 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Project
      module Sample
        class SampleDataRelationTreeRestorer < RelationTreeRestorer
          DATE_MODELS = %i[issues milestones].freeze

          def initialize(*args)
            super

            date_calculator
          end

          private

          def build_relation(relation_key, relation_definition, data_hash)
            # Override due date attributes in data hash for Sample Data templates
            # Dates are moved by taking the closest one to average and moving that (and the rest around it) to the date of import
            # TODO: Move this logic to RelationFactory (see: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41699#note_430465333)
            override_date_attributes!(relation_key, data_hash)
            super
          end

          def override_date_attributes!(relation_key, data_hash)
            return unless DATE_MODELS.include?(relation_key.to_sym)

            data_hash['start_date'] = date_calculator.calculate_by_closest_date_to_average(data_hash['start_date'].to_time) unless data_hash['start_date'].nil?
            data_hash['due_date'] = date_calculator.calculate_by_closest_date_to_average(data_hash['due_date'].to_time) unless data_hash['due_date'].nil?
          end

          # TODO: Move clear logic into the main consume_relation method (see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41699#note_430465330)
          def dates
            unless relation_reader.legacy?
              DATE_MODELS.map do |tag|
                relation_reader.consume_relation(@importable_path, tag).map { |model| model.first['due_date'] }.tap do
                  relation_reader.clear_consumed_relations
                end
              end
            end
          end

          def date_calculator
            @date_calculator ||= Gitlab::ImportExport::Project::Sample::DateCalculator.new(dates)
          end
        end
      end
    end
  end
end
@@ -1,61 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module Middleware
    # There is no valid reason for a request to contain a null byte (U+0000)
    # so just return HTTP 400 (Bad Request) if we receive one
    class HandleNullBytes
      NULL_BYTE_REGEX = Regexp.new(Regexp.escape("\u0000")).freeze

      attr_reader :app

      def initialize(app)
        @app = app
      end

      def call(env)
        return [400, {}, ["Bad Request"]] if request_has_null_byte?(env)

        app.call(env)
      end

      private

      def request_has_null_byte?(request)
        return false if ENV['REJECT_NULL_BYTES'] == "1"

        request = Rack::Request.new(request)

        request.params.values.any? do |value|
          param_has_null_byte?(value)
        end
      end

      def param_has_null_byte?(value, depth = 0)
        # Guard against possible attack sending large amounts of nested params
        # Should be safe as deeply nested params are highly uncommon.
        return false if depth > 2

        depth += 1

        if value.respond_to?(:match)
          string_contains_null_byte?(value)
        elsif value.respond_to?(:values)
          value.values.any? do |hash_value|
            param_has_null_byte?(hash_value, depth)
          end
        elsif value.is_a?(Array)
          value.any? do |array_value|
            param_has_null_byte?(array_value, depth)
          end
        else
          false
        end
      end

      def string_contains_null_byte?(string)
        string.match?(NULL_BYTE_REGEX)
      end
    end
  end
end
@@ -1 +0,0 @@
empty
@@ -1,89 +0,0 @@
import { GlTable, GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Tracking from '~/tracking';
import AlertIntegrationsList, {
  i18n,
} from '~/alerts_settings/components/alerts_integrations_list.vue';
import { trackAlertIntergrationsViewsOptions } from '~/alerts_settings/constants';

const mockIntegrations = [
  {
    activated: true,
    name: 'Integration 1',
    type: 'HTTP endpoint',
  },
  {
    activated: false,
    name: 'Integration 2',
    type: 'HTTP endpoint',
  },
];

describe('AlertIntegrationsList', () => {
  let wrapper;

  function mountComponent(propsData = {}) {
    wrapper = mount(AlertIntegrationsList, {
      propsData: {
        integrations: mockIntegrations,
        ...propsData,
      },
      stubs: {
        GlIcon: true,
      },
    });
  }

  afterEach(() => {
    if (wrapper) {
      wrapper.destroy();
      wrapper = null;
    }
  });

  beforeEach(() => {
    mountComponent();
  });

  const findTableComponent = () => wrapper.find(GlTable);
  const findStatusCell = () => wrapper.findAll('[data-testid="integration-activated-status"]');

  it('renders a table', () => {
    expect(findTableComponent().exists()).toBe(true);
  });

  it('renders an empty state when no integrations provided', () => {
    mountComponent({ integrations: [] });
    expect(findTableComponent().text()).toContain(i18n.emptyState);
  });

  describe('integration status', () => {
    it('enabled', () => {
      const cell = findStatusCell().at(0);
      const activatedIcon = cell.find(GlIcon);
      expect(cell.text()).toBe(i18n.status.enabled.name);
      expect(activatedIcon.attributes('name')).toBe('check-circle-filled');
      expect(activatedIcon.attributes('title')).toBe(i18n.status.enabled.tooltip);
    });

    it('disabled', () => {
      const cell = findStatusCell().at(1);
      const notActivatedIcon = cell.find(GlIcon);
      expect(cell.text()).toBe(i18n.status.disabled.name);
      expect(notActivatedIcon.attributes('name')).toBe('warning-solid');
      expect(notActivatedIcon.attributes('title')).toBe(i18n.status.disabled.tooltip);
    });
  });

  describe('Snowplow tracking', () => {
    beforeEach(() => {
      jest.spyOn(Tracking, 'event');
      mountComponent();
    });

    it('should track alert list page views', () => {
      const { category, action } = trackAlertIntergrationsViewsOptions;
      expect(Tracking.event).toHaveBeenCalledWith(category, action);
    });
  });
});
@@ -1,161 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`PipelinesChart when fetching more data when the fetchMore query returns data passes the data to the line chart 1`] = `
Array [
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        21,
      ],
      Array [
        "2020-07-01",
        10,
      ],
      Array [
        "2020-08-01",
        5,
      ],
    ],
    "name": "Total",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        21,
      ],
      Array [
        "2020-07-01",
        10,
      ],
      Array [
        "2020-08-01",
        5,
      ],
    ],
    "name": "Succeeded",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        22,
      ],
      Array [
        "2020-07-01",
        41,
      ],
      Array [
        "2020-08-01",
        5,
      ],
    ],
    "name": "Failed",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        21,
      ],
      Array [
        "2020-07-01",
        10,
      ],
      Array [
        "2020-08-01",
        5,
      ],
    ],
    "name": "Canceled",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        21,
      ],
      Array [
        "2020-07-01",
        10,
      ],
      Array [
        "2020-08-01",
        5,
      ],
    ],
    "name": "Skipped",
  },
]
`;

exports[`PipelinesChart with data passes the data to the line chart 1`] = `
Array [
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        22,
      ],
      Array [
        "2020-07-01",
        41,
      ],
    ],
    "name": "Total",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        21,
      ],
      Array [
        "2020-07-01",
        10,
      ],
    ],
    "name": "Succeeded",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        21,
      ],
      Array [
        "2020-07-01",
        10,
      ],
    ],
    "name": "Failed",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        22,
      ],
      Array [
        "2020-07-01",
        41,
      ],
    ],
    "name": "Canceled",
  },
  Object {
    "data": Array [
      Array [
        "2020-06-01",
        22,
      ],
      Array [
        "2020-07-01",
        41,
      ],
    ],
    "name": "Skipped",
  },
]
`;
@@ -1,189 +0,0 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlLineChart } from '@gitlab/ui/dist/charts';
import { GlAlert } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import createMockApollo from 'jest/helpers/mock_apollo_helper';
import PipelinesChart from '~/analytics/instance_statistics/components/pipelines_chart.vue';
import pipelinesStatsQuery from '~/analytics/instance_statistics/graphql/queries/pipeline_stats.query.graphql';
import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue';
import { mockCountsData1, mockCountsData2 } from '../mock_data';
import { getApolloResponse } from '../apollo_mock_data';

const localVue = createLocalVue();
localVue.use(VueApollo);

describe('PipelinesChart', () => {
  let wrapper;
  let queryHandler;

  const createApolloProvider = pipelineStatsHandler => {
    return createMockApollo([[pipelinesStatsQuery, pipelineStatsHandler]]);
  };

  const createComponent = apolloProvider => {
    return shallowMount(PipelinesChart, {
      localVue,
      apolloProvider,
    });
  };

  afterEach(() => {
    wrapper.destroy();
    wrapper = null;
  });

  const findLoader = () => wrapper.find(ChartSkeletonLoader);
  const findChart = () => wrapper.find(GlLineChart);
  const findAlert = () => wrapper.find(GlAlert);

  describe('while loading', () => {
    beforeEach(() => {
      queryHandler = jest.fn().mockReturnValue(new Promise(() => {}));
      const apolloProvider = createApolloProvider(queryHandler);
      wrapper = createComponent(apolloProvider);
    });

    it('requests data', () => {
      expect(queryHandler).toBeCalledTimes(1);
    });

    it('displays the skeleton loader', () => {
      expect(findLoader().exists()).toBe(true);
    });

    it('hides the chart', () => {
      expect(findChart().exists()).toBe(false);
    });

    it('does not show an error', () => {
      expect(findAlert().exists()).toBe(false);
    });
  });

  describe('without data', () => {
    beforeEach(() => {
      const emptyResponse = getApolloResponse();
      queryHandler = jest.fn().mockResolvedValue(emptyResponse);
      const apolloProvider = createApolloProvider(queryHandler);
      wrapper = createComponent(apolloProvider);
    });

    it('renders the no data message', () => {
      expect(findAlert().text()).toBe('There is no data available.');
    });

    it('hides the skeleton loader', () => {
      expect(findLoader().exists()).toBe(false);
    });

    it('hides the chart', () => {
      expect(findChart().exists()).toBe(false);
    });
  });

  describe('with data', () => {
    beforeEach(() => {
      const response = getApolloResponse({
        pipelinesTotal: mockCountsData1,
        pipelinesSucceeded: mockCountsData2,
        pipelinesFailed: mockCountsData2,
        pipelinesCanceled: mockCountsData1,
        pipelinesSkipped: mockCountsData1,
      });
      queryHandler = jest.fn().mockResolvedValue(response);
      const apolloProvider = createApolloProvider(queryHandler);
      wrapper = createComponent(apolloProvider);
    });

    it('requests data', () => {
      expect(queryHandler).toBeCalledTimes(1);
    });

    it('hides the skeleton loader', () => {
      expect(findLoader().exists()).toBe(false);
    });

    it('renders the chart', () => {
      expect(findChart().exists()).toBe(true);
    });

    it('passes the data to the line chart', () => {
      expect(findChart().props('data')).toMatchSnapshot();
    });

    it('does not show an error', () => {
      expect(findAlert().exists()).toBe(false);
    });
  });

  describe('when fetching more data', () => {
    const recordedAt = '2020-08-01';
    describe('when the fetchMore query returns data', () => {
      beforeEach(async () => {
        const newData = { recordedAt, count: 5 };
        const firstResponse = getApolloResponse({
          pipelinesTotal: mockCountsData2,
          pipelinesSucceeded: mockCountsData2,
          pipelinesFailed: mockCountsData1,
          pipelinesCanceled: mockCountsData2,
          pipelinesSkipped: mockCountsData2,
          hasNextPage: true,
        });
        const secondResponse = getApolloResponse({
          pipelinesTotal: [newData],
          pipelinesSucceeded: [newData],
          pipelinesFailed: [newData],
          pipelinesCanceled: [newData],
          pipelinesSkipped: [newData],
          hasNextPage: false,
        });
        queryHandler = jest
          .fn()
          .mockResolvedValueOnce(firstResponse)
          .mockResolvedValueOnce(secondResponse);
        const apolloProvider = createApolloProvider(queryHandler);
        wrapper = createComponent(apolloProvider);

        await wrapper.vm.$nextTick();
      });

      it('requests data twice', () => {
        expect(queryHandler).toBeCalledTimes(2);
      });

      it('passes the data to the line chart', () => {
        expect(findChart().props('data')).toMatchSnapshot();
      });
    });

    describe('when the fetchMore query throws an error', () => {
      beforeEach(async () => {
        const response = getApolloResponse({
          pipelinesTotal: mockCountsData2,
          pipelinesSucceeded: mockCountsData2,
          pipelinesFailed: mockCountsData1,
          pipelinesCanceled: mockCountsData2,
          pipelinesSkipped: mockCountsData2,
          hasNextPage: true,
        });
        queryHandler = jest.fn().mockResolvedValue(response);
        const apolloProvider = createApolloProvider(queryHandler);
        wrapper = createComponent(apolloProvider);
        jest
          .spyOn(wrapper.vm.$apollo.queries.pipelineStats, 'fetchMore')
          .mockImplementation(jest.fn().mockRejectedValue());
        await wrapper.vm.$nextTick();
      });

      it('calls fetchMore', () => {
        expect(wrapper.vm.$apollo.queries.pipelineStats.fetchMore).toHaveBeenCalledTimes(1);
      });

      it('shows an error message', () => {
        expect(findAlert().text()).toBe(
          'Could not load the pipelines chart. Please refresh the page to try again.',
        );
      });
    });
  });
});
@@ -1,196 +0,0 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import * as urlUtils from '~/lib/utils/url_utility';
import initStore from '~/search/store';
import DropdownFilter from '~/search/dropdown_filter/components/dropdown_filter.vue';
import stateFilterData from '~/search/dropdown_filter/constants/state_filter_data';
import confidentialFilterData from '~/search/dropdown_filter/constants/confidential_filter_data';
import { MOCK_QUERY } from '../mock_data';

jest.mock('~/lib/utils/url_utility', () => ({
  visitUrl: jest.fn(),
  setUrlParams: jest.fn(),
}));

const localVue = createLocalVue();
localVue.use(Vuex);

describe('DropdownFilter', () => {
  let wrapper;
  let store;

  const createStore = options => {
    store = initStore({ query: MOCK_QUERY, ...options });
  };

  const createComponent = (props = { filterData: stateFilterData }) => {
    wrapper = shallowMount(DropdownFilter, {
      localVue,
      store,
      propsData: {
        ...props,
      },
    });
  };

  afterEach(() => {
    wrapper.destroy();
    wrapper = null;
    store = null;
  });

  const findGlDropdown = () => wrapper.find(GlDropdown);
  const findGlDropdownItems = () => findGlDropdown().findAll(GlDropdownItem);
  const findDropdownItemsText = () => findGlDropdownItems().wrappers.map(w => w.text());
  const firstDropDownItem = () => findGlDropdownItems().at(0);

  describe('StatusFilter', () => {
    describe('template', () => {
      describe.each`
        scope               | showDropdown
        ${'issues'}         | ${true}
        ${'merge_requests'} | ${true}
        ${'projects'}       | ${false}
        ${'milestones'}     | ${false}
        ${'users'}          | ${false}
        ${'notes'}          | ${false}
        ${'wiki_blobs'}     | ${false}
        ${'blobs'}          | ${false}
      `(`dropdown`, ({ scope, showDropdown }) => {
        beforeEach(() => {
          createStore({ query: { ...MOCK_QUERY, scope } });
          createComponent();
        });

        it(`does${showDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
          expect(findGlDropdown().exists()).toBe(showDropdown);
        });
      });

      describe.each`
        initialFilter                           | label
        ${stateFilterData.filters.ANY.value}    | ${`Any ${stateFilterData.header}`}
        ${stateFilterData.filters.OPEN.value}   | ${stateFilterData.filters.OPEN.label}
        ${stateFilterData.filters.CLOSED.value} | ${stateFilterData.filters.CLOSED.label}
      `(`filter text`, ({ initialFilter, label }) => {
        describe(`when initialFilter is ${initialFilter}`, () => {
          beforeEach(() => {
            createStore({ query: { ...MOCK_QUERY, [stateFilterData.filterParam]: initialFilter } });
            createComponent();
          });

          it(`sets dropdown label to ${label}`, () => {
            expect(findGlDropdown().attributes('text')).toBe(label);
          });
        });
      });
    });

    describe('Filter options', () => {
      beforeEach(() => {
        createStore();
        createComponent();
      });

      it('renders a dropdown item for each filterOption', () => {
        expect(findDropdownItemsText()).toStrictEqual(
          stateFilterData.filterByScope[stateFilterData.scopes.ISSUES].map(v => {
            return v.label;
          }),
        );
      });

      it('clicking a dropdown item calls setUrlParams', () => {
        const filter = stateFilterData.filters[Object.keys(stateFilterData.filters)[0]].value;
        firstDropDownItem().vm.$emit('click');

        expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
          [stateFilterData.filterParam]: filter,
        });
      });

      it('clicking a dropdown item calls visitUrl', () => {
        firstDropDownItem().vm.$emit('click');

        expect(urlUtils.visitUrl).toHaveBeenCalled();
      });
    });
  });

  describe('ConfidentialFilter', () => {
    describe('template', () => {
      describe.each`
        scope               | showDropdown
        ${'issues'}         | ${true}
        ${'merge_requests'} | ${false}
        ${'projects'}       | ${false}
        ${'milestones'}     | ${false}
        ${'users'}          | ${false}
        ${'notes'}          | ${false}
        ${'wiki_blobs'}     | ${false}
        ${'blobs'}          | ${false}
      `(`dropdown`, ({ scope, showDropdown }) => {
        beforeEach(() => {
          createStore({ query: { ...MOCK_QUERY, scope } });
          createComponent({ filterData: confidentialFilterData });
        });

        it(`does${showDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
          expect(findGlDropdown().exists()).toBe(showDropdown);
        });
      });

      describe.each`
        initialFilter                                            | label
        ${confidentialFilterData.filters.ANY.value}              | ${`Any ${confidentialFilterData.header}`}
        ${confidentialFilterData.filters.CONFIDENTIAL.value}     | ${confidentialFilterData.filters.CONFIDENTIAL.label}
        ${confidentialFilterData.filters.NOT_CONFIDENTIAL.value} | ${confidentialFilterData.filters.NOT_CONFIDENTIAL.label}
      `(`filter text`, ({ initialFilter, label }) => {
        describe(`when initialFilter is ${initialFilter}`, () => {
          beforeEach(() => {
            createStore({
              query: { ...MOCK_QUERY, [confidentialFilterData.filterParam]: initialFilter },
            });
            createComponent({ filterData: confidentialFilterData });
          });

          it(`sets dropdown label to ${label}`, () => {
            expect(findGlDropdown().attributes('text')).toBe(label);
          });
        });
      });
    });
  });

  describe('Filter options', () => {
    beforeEach(() => {
      createStore();
      createComponent({ filterData: confidentialFilterData });
    });

    it('renders a dropdown item for each filterOption', () => {
      expect(findDropdownItemsText()).toStrictEqual(
        confidentialFilterData.filterByScope[confidentialFilterData.scopes.ISSUES].map(v => {
          return v.label;
        }),
      );
    });

    it('clicking a dropdown item calls setUrlParams', () => {
      const filter =
        confidentialFilterData.filters[Object.keys(confidentialFilterData.filters)[0]].value;
      firstDropDownItem().vm.$emit('click');

      expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
        [confidentialFilterData.filterParam]: filter,
      });
    });

    it('clicking a dropdown item calls visitUrl', () => {
      firstDropDownItem().vm.$emit('click');

      expect(urlUtils.visitUrl).toHaveBeenCalled();
    });
  });
});
@@ -1,5 +0,0 @@
export const MOCK_QUERY = {
  scope: 'issues',
  state: 'all',
  confidential: null,
};
@@ -1,95 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BulkImport::Client do
  include ImportSpecHelper

  let(:uri) { 'http://gitlab.example' }
  let(:token) { 'token' }
  let(:resource) { 'resource' }

  subject { described_class.new(uri: uri, token: token) }

  describe '#get' do
    let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }

    shared_examples 'performs network request' do
      it 'performs network request' do
        expect(Gitlab::HTTP).to receive(:get).with(*expected_args).and_return(response_double)

        subject.get(resource)
      end
    end

    describe 'parsed response' do
      it 'returns parsed response' do
        response_double = double(code: 200, success?: true, parsed_response: [{ id: 1 }, { id: 2 }])

        allow(Gitlab::HTTP).to receive(:get).and_return(response_double)

        expect(subject.get(resource)).to eq(response_double.parsed_response)
      end
    end

    describe 'request query' do
      include_examples 'performs network request' do
        let(:expected_args) do
          [
            anything,
            hash_including(
              query: {
                page: described_class::DEFAULT_PAGE,
                per_page: described_class::DEFAULT_PER_PAGE
              }
            )
          ]
        end
      end
    end

    describe 'request headers' do
      include_examples 'performs network request' do
        let(:expected_args) do
          [
            anything,
            hash_including(
              headers: {
                'Content-Type' => 'application/json',
                'Authorization' => "Bearer #{token}"
              }
            )
          ]
        end
      end
    end

    describe 'request uri' do
      include_examples 'performs network request' do
        let(:expected_args) do
          ['http://gitlab.example:80/api/v4/resource', anything]
        end
      end
    end

    context 'error handling' do
      context 'when error occurred' do
        it 'raises ConnectionError' do
          allow(Gitlab::HTTP).to receive(:get).and_raise(Errno::ECONNREFUSED)

          expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
        end
      end

      context 'when response is not success' do
        it 'raises ConnectionError' do
          response_double = double(code: 503, success?: false)

          allow(Gitlab::HTTP).to receive(:get).and_return(response_double)

          expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
        end
      end
    end
  end
end
@@ -1,87 +0,0 @@
# frozen_string_literal: true

# This spec is a lightweight version of:
# * project/tree_restorer_spec.rb
#
# In depth testing is being done in the above specs.
# This spec tests that restore of the sample project works
# but does not have 100% relation coverage.

require 'spec_helper'

RSpec.describe Gitlab::ImportExport::Project::Sample::SampleDataRelationTreeRestorer do
  include_context 'relation tree restorer shared context'

  let(:sample_data_relation_tree_restorer) do
    described_class.new(
      user: user,
      shared: shared,
      relation_reader: relation_reader,
      object_builder: object_builder,
      members_mapper: members_mapper,
      relation_factory: relation_factory,
      reader: reader,
      importable: importable,
      importable_path: importable_path,
      importable_attributes: attributes
    )
  end

  subject { sample_data_relation_tree_restorer.restore }

  shared_examples 'import project successfully' do
    it 'restores project tree' do
      expect(subject).to eq(true)
    end

    describe 'imported project' do
      let(:project) { Project.find_by_path('project') }

      before do
        subject
      end

      it 'has the project attributes and relations', :aggregate_failures do
        expect(project.description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.')
        expect(project.issues.count).to eq(10)
        expect(project.milestones.count).to eq(3)
        expect(project.labels.count).to eq(2)
        expect(project.project_feature).not_to be_nil
      end

      it 'has issues with correctly updated due dates' do
        due_dates = due_dates(project.issues)

        expect(due_dates).to match_array([Date.today - 7.days, Date.today, Date.today + 7.days])
      end

      it 'has milestones with correctly updated due dates' do
        due_dates = due_dates(project.milestones)

        expect(due_dates).to match_array([Date.today - 7.days, Date.today, Date.today + 7.days])
      end

      def due_dates(relations)
        due_dates = relations.map { |relation| relation['due_date'] }
        due_dates.compact!
        due_dates.sort
      end
    end
  end

  context 'when restoring a project' do
    let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
    let(:importable_name) { 'project' }
    let(:importable_path) { 'project' }
    let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder }
    let(:relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
    let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }

    context 'using ndjson reader' do
      let(:path) { 'spec/fixtures/lib/gitlab/import_export/sample_data/tree' }
      let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) }

      it_behaves_like 'import project successfully'
    end
  end
end
@@ -1,88 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'
require "rack/test"

RSpec.describe Gitlab::Middleware::HandleNullBytes do
  let(:null_byte) { "\u0000" }
  let(:error_400) { [400, {}, ["Bad Request"]] }
  let(:app) { double(:app) }

  subject { described_class.new(app) }

  before do
    allow(app).to receive(:call) do |args|
      args
    end
  end

  def env_for(params = {})
    Rack::MockRequest.env_for('/', { params: params })
  end

  context 'with null bytes in params' do
    it 'rejects null bytes in a top level param' do
      env = env_for(name: "null#{null_byte}byte")

      expect(subject.call(env)).to eq error_400
    end

    it "responds with 400 BadRequest for hashes with strings" do
      env = env_for(name: { inner_key: "I am #{null_byte} bad" })

      expect(subject.call(env)).to eq error_400
    end

    it "responds with 400 BadRequest for arrays with strings" do
      env = env_for(name: ["I am #{null_byte} bad"])

      expect(subject.call(env)).to eq error_400
    end

    it "responds with 400 BadRequest for arrays containing hashes with string values" do
      env = env_for(name: [
        {
          inner_key: "I am #{null_byte} bad"
        }
      ])

      expect(subject.call(env)).to eq error_400
    end

    it "gives up and does not 400 with too deeply nested params" do
      env = env_for(name: [
        {
          inner_key: { deeper_key: [{ hash_inside_array_key: "I am #{null_byte} bad" }] }
        }
      ])

      expect(subject.call(env)).not_to eq error_400
    end
  end

  context 'without null bytes in params' do
    it "does not respond with a 400 for strings" do
      env = env_for(name: "safe name")

      expect(subject.call(env)).not_to eq error_400
    end

    it "does not respond with a 400 with no params" do
      env = env_for

      expect(subject.call(env)).not_to eq error_400
    end
  end

  context 'when disabled via env flag' do
    before do
      stub_env('REJECT_NULL_BYTES', '1')
    end

    it 'does not respond with a 400 no matter what' do
      env = env_for(name: "null#{null_byte}byte")

      expect(subject.call(env)).not_to eq error_400
    end
  end
end
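The spec above treats the middleware as a black box: any string param containing a null byte, nested up to a few levels deep, must short-circuit to a plain 400 triple. A minimal sketch of that shape, assuming a recursive scan with a depth cutoff like the one the "too deeply nested" example relies on (the class name and depth limit are illustrative, not GitLab's implementation):

# Minimal Rack middleware sketch that rejects params containing null bytes.
require 'rack'

class RejectNullBytes
  NULL_BYTE = "\u0000"
  MAX_DEPTH = 3 # illustrative cutoff: deeper structures are waved through

  def initialize(app)
    @app = app
  end

  def call(env)
    params = Rack::Request.new(env).params
    return [400, {}, ['Bad Request']] if null_bytes?(params)

    @app.call(env)
  end

  private

  def null_bytes?(value, depth = 0)
    return false if depth > MAX_DEPTH

    case value
    when String then value.include?(NULL_BYTE)
    when Array  then value.any? { |v| null_bytes?(v, depth + 1) }
    when Hash   then value.each_value.any? { |v| null_bytes?(v, depth + 1) }
    else false
    end
  end
end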
@@ -1,37 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20201015073808_schedule_blocked_by_links_replacement')

RSpec.describe ScheduleBlockedByLinksReplacement do
  let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
  let(:project) { table(:projects).create!(namespace_id: namespace.id, name: 'gitlab') }
  let(:issue1) { table(:issues).create!(project_id: project.id, title: 'a') }
  let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') }
  let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') }
  let!(:issue_links) do
    [
      table(:issue_links).create!(source_id: issue1.id, target_id: issue2.id, link_type: 1),
      table(:issue_links).create!(source_id: issue2.id, target_id: issue1.id, link_type: 2),
      table(:issue_links).create!(source_id: issue1.id, target_id: issue3.id, link_type: 2)
    ]
  end

  before do
    stub_const("#{described_class.name}::BATCH_SIZE", 1)
  end

  it 'schedules jobs for blocked_by links' do
    Sidekiq::Testing.fake! do
      freeze_time do
        migrate!

        expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
          2.minutes, issue_links[1].id, issue_links[1].id)
        expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
          4.minutes, issue_links[2].id, issue_links[2].id)
        expect(BackgroundMigrationWorker.jobs.size).to eq(2)
      end
    end
  end
end
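With BATCH_SIZE stubbed to 1, each qualifying row becomes its own batch, and each batch is queued one interval further into the future, which is why the spec expects jobs at 2 and 4 minutes whose min and max ids are equal. Only blocked_by rows (link_type: 2) qualify, so the first link above produces no job. A hedged sketch of that scheduling pattern (the model, constants, and worker arguments are illustrative stand-ins for the migration's internals):

# Delayed-batch scheduling sketch: one job per batch, each a fixed
# interval later than the previous one.
INTERVAL = 2.minutes

IssueLink.where(link_type: 2).in_batches(of: BATCH_SIZE).each_with_index do |batch, index|
  range = [batch.minimum(:id), batch.maximum(:id)]
  BackgroundMigrationWorker.perform_in((index + 1) * INTERVAL, MIGRATION, range)
end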
@@ -1,164 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::BuildTraceChunks::LegacyFog do
  let(:data_store) { described_class.new }

  before do
    stub_artifacts_object_storage
  end

  describe '#available?' do
    subject { data_store.available? }

    context 'when object storage is enabled' do
      it { is_expected.to be_truthy }
    end

    context 'when object storage is disabled' do
      before do
        stub_artifacts_object_storage(enabled: false)
      end

      it { is_expected.to be_falsy }
    end
  end

  describe '#data' do
    subject { data_store.data(model) }

    context 'when data exists' do
      let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }

      it 'returns the data' do
        is_expected.to eq('sample data in fog')
      end
    end

    context 'when data does not exist' do
      let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }

      it 'returns nil' do
        expect(data_store.data(model)).to be_nil
      end
    end
  end

  describe '#set_data' do
    let(:new_data) { 'abc123' }

    context 'when data exists' do
      let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }

      it 'overwrites data' do
        expect(data_store.data(model)).to eq('sample data in fog')

        data_store.set_data(model, new_data)

        expect(data_store.data(model)).to eq new_data
      end
    end

    context 'when data does not exist' do
      let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }

      it 'sets new data' do
        expect(data_store.data(model)).to be_nil

        data_store.set_data(model, new_data)

        expect(data_store.data(model)).to eq new_data
      end
    end
  end

  describe '#delete_data' do
    subject { data_store.delete_data(model) }

    context 'when data exists' do
      let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }

      it 'deletes data' do
        expect(data_store.data(model)).to eq('sample data in fog')

        subject

        expect(data_store.data(model)).to be_nil
      end
    end

    context 'when data does not exist' do
      let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }

      it 'does nothing' do
        expect(data_store.data(model)).to be_nil

        subject

        expect(data_store.data(model)).to be_nil
      end
    end
  end

  describe '#size' do
    context 'when data exists' do
      let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'üabcd') }

      it 'returns data bytesize correctly' do
        expect(data_store.size(model)).to eq 6
      end
    end

    context 'when data does not exist' do
      let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }

      it 'returns zero' do
        expect(data_store.size(model)).to be_zero
      end
    end
  end

  describe '#keys' do
    subject { data_store.keys(relation) }

    let(:build) { create(:ci_build) }
    let(:relation) { build.trace_chunks }

    before do
      create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
      create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
    end

    it 'returns keys' do
      is_expected.to eq([[build.id, 0], [build.id, 1]])
    end
  end

  describe '#delete_keys' do
    subject { data_store.delete_keys(keys) }

    let(:build) { create(:ci_build) }
    let(:relation) { build.trace_chunks }
    let(:keys) { data_store.keys(relation) }

    before do
      create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
      create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
    end

    it 'deletes multiple data' do
      ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
        expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present
        expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present
      end

      subject

      ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
        expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound)
        expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound)
      end
    end
  end
end
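One detail worth calling out: the #size expectation of 6 for 'üabcd' is a byte count, not a character count, because 'ü' occupies two bytes in UTF-8. A quick check in any Ruby console:

# String#size counts characters; String#bytesize counts encoded bytes.
'üabcd'.size     # => 5
'üabcd'.bytesize # => 6 ('ü' is two bytes in UTF-8)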
@@ -1,14 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'User sends null bytes as params' do
  let(:null_byte) { "\u0000" }

  it 'raises a 400 error' do
    post '/nonexistent', params: { a: "A #{null_byte} nasty string" }

    expect(response).to have_gitlab_http_status(:bad_request)
    expect(response.body).to eq('Bad Request')
  end
end
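This request spec covers the same rejection end to end: the null byte survives URL encoding and Rack's param parsing, and the response body matches the bare 'Bad Request' triple asserted in the middleware spec above. A hedged reproduction against a running instance (the host and path are assumptions, not part of the deleted spec):

# Illustrative reproduction with Net::HTTP; expects a 400 from a local GitLab.
require 'net/http'

uri = URI('http://localhost:3000/nonexistent')
response = Net::HTTP.post_form(uri, 'a' => "A \u0000 nasty string")
puts response.code # expected: "400"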