debian-mirror-gitlab/spec/requests/api/graphql/project/merge_requests_spec.rb

# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'getting merge request listings nested in a project', feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, :public, group: group) }
let_it_be(:current_user) { create(:user) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:group_label) { create(:group_label, group: group) }
let_it_be_with_reload(:merge_request_a) do
create(:labeled_merge_request, :unique_branches, source_project: project, labels: [label, group_label])
end
let_it_be(:merge_request_b) do
create(:merge_request, :closed, :unique_branches, source_project: project)
end
let_it_be(:merge_request_c) do
create(:labeled_merge_request, :closed, :unique_branches, source_project: project, labels: [label, group_label])
end
let_it_be(:merge_request_d) do
create(:merge_request, :locked, :unique_branches, source_project: project)
end
let_it_be(:merge_request_e) do
create(:merge_request, :unique_branches, source_project: project)
end
let(:all_merge_requests) do
[merge_request_a, merge_request_b, merge_request_c, merge_request_d, merge_request_e]
end
let(:results) { graphql_data.dig('project', 'mergeRequests', 'nodes') }
let(:search_params) { nil }
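# Builds a project-level GraphQL query selecting the given merge request fields, filtered by `search_params`.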
def query_merge_requests(fields)
graphql_query_for(
:project,
{ full_path: project.full_path },
query_nodes(:merge_requests, fields, args: search_params)
)
end
it_behaves_like 'a working graphql query' do
let(:query) do
query_merge_requests(all_graphql_fields_for('MergeRequest', max_depth: 2))
end
before do
# We cannot disable SQL query limiting here, since the transaction does not
# begin until we enter the controller.
headers = {
'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/322979'
}
post_graphql(query, current_user: current_user, headers: headers)
end
end
# The following tests are needed to guarantee that we have correctly annotated
# all the gitaly calls. Selecting combinations of fields may mask this due to
# memoization.
context 'when requesting a single field' do
let_it_be(:fresh_mr) { create(:merge_request, :unique_branches, source_project: project) }
let(:search_params) { { iids: [fresh_mr.iid.to_s] } }
let(:graphql_data) do
GitlabSchema.execute(query, context: { current_user: current_user }).to_h['data']
end
before do
project.repository.expire_branches_cache
end
context 'when selecting any single scalar field' do
where(:field) do
scalar_fields_of('MergeRequest').map { |name| [name] }
end
with_them do
let(:query) do
query_merge_requests([:iid, field].uniq)
end
it 'selects the correct MR' do
expect(results).to contain_exactly(a_hash_including('iid' => fresh_mr.iid.to_s))
end
end
end
context 'when selecting any single nested field' do
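# Enumerates each nested MergeRequest field (unwrapping connection types to their node type), paired with every scalar subfield that requires no arguments.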
where(:field, :subfield, :is_connection) do
nested_fields_of('MergeRequest').flat_map do |name, field|
type = field_type(field)
is_connection = type.graphql_name.ends_with?('Connection')
type = field_type(type.fields['nodes']) if is_connection
type.fields
.select { |_, field| !nested_fields?(field) && !required_arguments?(field) }
.map(&:first)
.map { |subfield| [name, subfield, is_connection] }
end
end
with_them do
let(:query) do
fld = is_connection ? query_graphql_field(:nodes, nil, [subfield]) : subfield
query_merge_requests([:iid, query_graphql_field(field, nil, [fld])])
end
it 'selects the correct MR' do
expect(results).to contain_exactly(a_hash_including('iid' => fresh_mr.iid.to_s))
end
end
end
end
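# Including contexts must define `search_params` and the expected `mrs`.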
shared_examples 'when searching with parameters' do
let(:query) do
query_merge_requests('iid title')
end
let(:expected) do
mrs.map { |mr| a_hash_including('iid' => mr.iid.to_s, 'title' => mr.title) }
end
it 'finds the right mrs' do
post_graphql(query, current_user: current_user)
expect(results).to match_array(expected)
end
end
context 'when there are no search params' do
let(:search_params) { nil }
let(:mrs) { [merge_request_a, merge_request_b, merge_request_c, merge_request_d, merge_request_e] }
it_behaves_like 'when searching with parameters'
end
context 'when the search params do not match anything' do
let(:search_params) { { iids: %w[foo bar baz] } }
let(:mrs) { [] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by iids' do
let(:search_params) { { iids: mrs.map(&:iid).map(&:to_s) } }
let(:mrs) { [merge_request_a, merge_request_c] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by state' do
let(:search_params) { { state: :closed } }
let(:mrs) { [merge_request_b, merge_request_c] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by source_branch' do
let(:search_params) { { source_branches: mrs.map(&:source_branch) } }
let(:mrs) { [merge_request_b, merge_request_c] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by target_branch' do
let(:search_params) { { target_branches: mrs.map(&:target_branch) } }
let(:mrs) { [merge_request_a, merge_request_d] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by label' do
let(:search_params) { { labels: [label.title] } }
let(:mrs) { [merge_request_a, merge_request_c] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by update time' do
let(:start_time) { 10.days.ago }
let(:cutoff) { start_time + 36.hours }
before do
all_merge_requests.each_with_index do |mr, i|
mr.updated_at = start_time + i.days
mr.save!(touch: false)
end
end
context 'when searching by updated_after' do
let(:search_params) { { updated_after: cutoff } }
let(:mrs) { all_merge_requests[2..] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by updated_before' do
let(:search_params) { { updated_before: cutoff } }
let(:mrs) { all_merge_requests[0..1] }
it_behaves_like 'when searching with parameters'
end
context 'when searching by updated_before and updated_after' do
let(:search_params) { { updated_after: cutoff, updated_before: cutoff + 2.days } }
let(:mrs) { all_merge_requests[2..3] }
it_behaves_like 'when searching with parameters'
end
end
context 'when searching by combination' do
let(:search_params) { { state: :closed, labels: [label.title] } }
let(:mrs) { [merge_request_c] }
it_behaves_like 'when searching with parameters'
end
context 'when requesting `approved_by`' do
let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_b.iid.to_s] } }
let(:extra_iid_for_second_query) { merge_request_c.iid.to_s }
let(:requested_fields) { query_graphql_field(:approved_by, nil, query_graphql_field(:nodes, nil, [:username])) }
def execute_query
query = query_merge_requests(requested_fields)
post_graphql(query, current_user: current_user)
end
it 'exposes approver username' do
merge_request_a.approved_by_users << current_user
execute_query
user_data = { 'username' => current_user.username }
expect(results).to include(a_hash_including('approvedBy' => { 'nodes' => array_including(user_data) }))
end
include_examples 'N+1 query check'
end
describe 'fields' do
let(:requested_fields) { nil }
let(:extra_iid_for_second_query) { merge_request_c.iid.to_s }
let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_b.iid.to_s] } }
def execute_query
query = query_merge_requests(requested_fields)
post_graphql(query, current_user: current_user)
end
context 'when requesting `commit_count`' do
let(:merge_request_with_commits) { create(:merge_request, source_project: project) }
let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_with_commits.iid.to_s] } }
let(:requested_fields) { [:iid, :commit_count] }
it 'exposes `commit_count`' do
execute_query
expect(results).to match_array [
{ "iid" => merge_request_a.iid.to_s, "commitCount" => 0 },
{ "iid" => merge_request_with_commits.iid.to_s, "commitCount" => 29 }
]
end
end
context 'when requesting `merged_at`' do
let(:requested_fields) { [:merged_at] }
before do
# make the MRs "merged"
[merge_request_a, merge_request_b, merge_request_c].each do |mr|
mr.update!(state_id: MergeRequest.available_states[:merged])
mr.metrics.update!(merged_at: Time.now)
end
end
include_examples 'N+1 query check'
end
context 'when requesting `user_notes_count`' do
let(:requested_fields) { [:user_notes_count] }
before do
create_list(:note_on_merge_request, 2, noteable: merge_request_a, project: project)
create(:note_on_merge_request, noteable: merge_request_c, project: project)
end
include_examples 'N+1 query check'
end
context 'when requesting `user_discussions_count`' do
let(:requested_fields) { [:user_discussions_count] }
before do
create_list(:note_on_merge_request, 2, noteable: merge_request_a, project: project)
create(:note_on_merge_request, noteable: merge_request_c, project: project)
end
include_examples 'N+1 query check'
end
context 'when requesting reviewers' do
let(:requested_fields) { ['reviewers { nodes { username } }'] }
before do
merge_request_a.reviewers << create(:user)
merge_request_a.reviewers << create(:user)
merge_request_c.reviewers << create(:user)
end
it 'returns the reviewers' do
nodes = merge_request_a.reviewers.map { |r| { 'username' => r.username } }
reviewers = { 'nodes' => match_array(nodes) }
execute_query
expect(results).to include a_hash_including('reviewers' => match(reviewers))
end
include_examples 'N+1 query check'
end
context 'when requesting award emoji votes' do
let(:requested_fields) { [:upvotes, :downvotes] }
before do
create_list(:award_emoji, 2, name: 'thumbsup', awardable: merge_request_a)
create_list(:award_emoji, 2, name: 'thumbsdown', awardable: merge_request_b)
end
include_examples 'N+1 query check'
end
context 'when requesting participants' do
let(:requested_fields) { 'participants { nodes { name } }' }
before do
create(:award_emoji, :upvote, awardable: merge_request_a)
create(:award_emoji, :upvote, awardable: merge_request_b)
create(:award_emoji, :upvote, awardable: merge_request_c)
note_with_emoji_a = create(:note_on_merge_request, noteable: merge_request_a, project: project)
note_with_emoji_b = create(:note_on_merge_request, noteable: merge_request_b, project: project)
note_with_emoji_c = create(:note_on_merge_request, noteable: merge_request_c, project: project)
create(:award_emoji, :upvote, awardable: note_with_emoji_a)
create(:award_emoji, :upvote, awardable: note_with_emoji_b)
create(:award_emoji, :upvote, awardable: note_with_emoji_c)
end
# Executes 3 extra queries to fetch participant_attrs
include_examples 'N+1 query check', threshold: 3
end
context 'when requesting labels' do
let(:requested_fields) { ['labels { nodes { id } }'] }
before do
project_labels = create_list(:label, 2, project: project)
group_labels = create_list(:group_label, 2, group: group)
merge_request_c.update!(labels: [project_labels, group_labels].flatten)
end
include_examples 'N+1 query check', skip_cached: false
end
end
describe 'performance' do
let(:mr_fields) do
<<~SELECT
assignees { nodes { username } }
reviewers { nodes { username } }
participants { nodes { username } }
headPipeline { status }
timelogs { nodes { timeSpent } }
SELECT
end
let(:query) do
<<~GQL
query($first: Int) {
project(fullPath: "#{project.full_path}") {
mergeRequests(first: $first) {
nodes { iid #{mr_fields} }
}
}
}
GQL
end
before_all do
project.add_developer(current_user)
mrs = create_list(:merge_request, 10, :closed, :with_head_pipeline,
source_project: project,
author: current_user)
mrs.each do |mr|
mr.assignees << create(:user)
mr.assignees << current_user
mr.reviewers << create(:user)
mr.reviewers << current_user
mr.timelogs << create(:merge_request_timelog, merge_request: mr)
end
end
before do
# Confounding factor: makes DB calls in EE
allow(Gitlab::Database).to receive(:read_only?).and_return(false)
end
def query_context
{ current_user: current_user }
end
def run_query(number)
# Ensure that we have a fresh request store and batch-context between runs
vars = { first: number }
result = run_with_clean_state(query, context: query_context, variables: vars)
graphql_dig_at(result.to_h, :data, :project, :merge_requests, :nodes)
end
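# Matcher for a non-empty user connection where every node exposes a username.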
def user_collection
{ 'nodes' => be_present.and(all(match(a_hash_including('username' => be_present)))) }
end
it 'returns appropriate results' do
mrs = run_query(2)
expect(mrs.size).to eq(2)
expect(mrs).to all(
match(
a_hash_including(
'assignees' => user_collection,
'reviewers' => user_collection,
'participants' => user_collection,
'headPipeline' => { 'status' => be_present },
'timelogs' => { 'nodes' => be_one }
)))
end
it 'can lookahead to eliminate N+1 queries' do
baseline = ActiveRecord::QueryRecorder.new { run_query(1) }
expect { run_query(10) }.not_to exceed_query_limit(baseline)
end
end
describe 'sorting and pagination' do
let(:data_path) { [:project, :mergeRequests] }
def pagination_results_data(nodes)
nodes
end
def pagination_query(params)
graphql_query_for(:project, { full_path: project.full_path }, <<~QUERY)
mergeRequests(#{params}) {
#{page_info} nodes { id }
}
QUERY
end
context 'when sorting by merged_at DESC' do
let(:sort_param) { :MERGED_AT_DESC }
let(:all_records) do
[
merge_request_b,
merge_request_d,
merge_request_c,
merge_request_e,
merge_request_a
].map { |mr| a_graphql_entity_for(mr) }
end
before do
five_days_ago = 5.days.ago
merge_request_d.metrics.update!(merged_at: five_days_ago)
# same merged_at, the second order column will decide (merge_request.id)
merge_request_c.metrics.update!(merged_at: five_days_ago)
merge_request_b.metrics.update!(merged_at: 1.day.ago)
end
it_behaves_like 'sorted paginated query' do
let(:first_param) { 2 }
end
context 'when last parameter is given' do
let(:params) { graphql_args(sort: sort_param, last: 2) }
let(:page_info) { nil }
it 'takes the last 2 records' do
query = pagination_query(params)
post_graphql(query, current_user: current_user)
expect(results).to match(all_records.last(2))
end
end
end
context 'when sorting by closed_at DESC' do
let(:sort_param) { :CLOSED_AT_DESC }
let(:all_records) do
[
merge_request_b,
merge_request_d,
merge_request_c,
merge_request_e,
merge_request_a
].map { |mr| a_graphql_entity_for(mr) }
end
before do
five_days_ago = 5.days.ago
merge_request_d.metrics.update!(latest_closed_at: five_days_ago)
# same latest_closed_at, the second order column will decide (merge_request.id)
merge_request_c.metrics.update!(latest_closed_at: five_days_ago)
merge_request_b.metrics.update!(latest_closed_at: 1.day.ago)
end
it_behaves_like 'sorted paginated query' do
let(:first_param) { 2 }
end
context 'when last parameter is given' do
let(:params) { graphql_args(sort: sort_param, last: 2) }
let(:page_info) { nil }
it 'takes the last 2 records' do
query = pagination_query(params)
post_graphql(query, current_user: current_user)
expect(results).to match(all_records.last(2))
end
end
end
end
context 'when only the count is requested' do
let_it_be(:merged_at) { Time.new(2020, 1, 3) }
context 'when merged at filter is present' do
let_it_be(:merge_request) do
create(:merge_request, :unique_branches, source_project: project).tap do |mr|
mr.metrics.update!(merged_at: merged_at, created_at: merged_at - 2.days)
end
end
let(:query) do
# Note: __typename meta field is always requested by the FE
graphql_query_for(:project, { full_path: project.full_path }, <<~QUERY)
mergeRequests(mergedAfter: "2020-01-01", mergedBefore: "2020-01-05", first: 0, sourceBranches: null, labels: null) {
count
__typename
}
QUERY
end
it 'does not query the merge requests table for the count' do
query_recorder = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
queries = query_recorder.log
expect(queries).not_to include(match(/SELECT COUNT\(\*\) FROM "merge_requests"/))
expect(queries).to include(match(/SELECT COUNT\(\*\) FROM "merge_request_metrics"/))
end
context 'when total_time_to_merge and count are queried' do
let_it_be(:merge_request_2) do
create(:merge_request, :unique_branches, source_project: project).tap do |mr|
mr.metrics.update!(merged_at: merged_at, created_at: merged_at - 1.day)
end
end
let(:query) do
graphql_query_for(:project, { full_path: project.full_path }, <<~QUERY)
mergeRequests(mergedAfter: "2020-01-01", mergedBefore: "2020-01-05", first: 0) {
totalTimeToMerge
count
}
QUERY
end
it 'uses the merge_request_metrics table for total_time_to_merge' do
query_recorder = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
expect(query_recorder.log).to include(match(/SELECT.+SUM.+FROM "merge_request_metrics" WHERE/))
end
it 'returns the correct total time to merge' do
post_graphql(query, current_user: current_user)
sum = graphql_data_at(:project, :merge_requests, :total_time_to_merge)
expect(sum).to eq(3.days.to_f)
end
end
it 'returns the correct count' do
post_graphql(query, current_user: current_user)
count = graphql_data.dig('project', 'mergeRequests', 'count')
expect(count).to eq(1)
end
end
end
end