2019-10-12 21:52:04 +05:30
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
module GraphqlHelpers
|
2018-11-18 11:00:15 +05:30
|
|
|
# Pair of (query string, variables) describing a mutation built by #graphql_mutation.
MutationDefinition = Struct.new(:query, :variables)
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
# Raised by #graphql_data when the response carries no 'data' key.
NoData = Class.new(StandardError)
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
# Makes an underscored string look like a GraphQL field name.
#
#   "merge_request" => "mergeRequest"
def self.fieldnamerize(underscored_field_name)
  name = underscored_field_name.to_s

  name.camelize(:lower)
end
|
|
|
|
|
2020-06-23 00:09:42 +05:30
|
|
|
# Runs a resolver in a way that closely mimics the framework.
#
# `ready?` is called first (with lazy values synced). If the resolver
# reports it is not ready, the early-return value is returned instead;
# otherwise `resolve` is invoked with the same arguments.
def resolve(resolver_class, obj: nil, args: {}, ctx: {}, field: nil)
  instance = resolver_class.new(object: obj, context: ctx, field: field)

  is_ready, early_return = sync_all { instance.ready?(**args) }
  return early_return unless is_ready

  instance.resolve(**args)
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
# Eagerly runs a resolver, syncing any lazy value returned by #resolve.
def eager_resolve(resolver_class, **opts)
  lazy_result = resolve(resolver_class, **opts)

  sync(lazy_result)
end
|
|
|
|
|
|
|
|
# Forces a lazy value to its final form; plain values pass through unchanged.
def sync(value)
  return GitlabSchema.sync_lazy(value) if GitlabSchema.lazy?(value)

  value
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
# Runs the block inside a BatchLoader::Executor wrapper, optionally
# asserting that at most `max_queries` DB queries are issued.
def batch(max_queries: nil, &blk)
  wrapper = proc do
    BatchLoader::Executor.ensure_current
    yield
  ensure
    BatchLoader::Executor.clear_current
  end

  return wrapper.call unless max_queries

  result = nil
  expect { result = wrapper.call }.not_to exceed_query_limit(max_queries)
  result
end
|
|
|
|
|
2019-12-04 20:38:33 +05:30
|
|
|
# BatchLoader::GraphQL returns wrapper objects, so we must :sync them
# in order to get the actual values.
def batch_sync(max_queries: nil, &blk)
  batch(max_queries: max_queries) do
    sync_all(&blk)
  end
end
|
2019-12-04 20:38:33 +05:30
|
|
|
|
2020-06-23 00:09:42 +05:30
|
|
|
# Yields, then syncs the block's result; arrays are synced element-wise.
def sync_all(&blk)
  result = yield

  result.is_a?(Array) ? result.map { |item| sync(item) } : sync(result)
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
# Builds a complete query document selecting a single root field.
def graphql_query_for(name, attributes = {}, fields = nil)
  <<~QUERY
  {
    #{query_graphql_field(name, attributes, fields)}
  }
  QUERY
end
|
|
|
|
|
2020-06-23 00:09:42 +05:30
|
|
|
# Builds a MutationDefinition (query string + variables) for the named mutation.
#
# Payload fields may be given either via the `fields` parameter or a block
# (not both). When neither is given, every available payload field is selected.
def graphql_mutation(name, input, fields = nil, &block)
  raise ArgumentError, 'Please pass either `fields` parameter or a block to `#graphql_mutation`, but not both.' if fields.present? && block_given?

  mutation_name = GraphqlHelpers.fieldnamerize(name)
  input_variable_name = "$#{input_variable_name_for_mutation(name)}"
  mutation_field = GitlabSchema.mutation.fields[mutation_name]

  # A block, when given, supplies the payload selection set
  fields = yield if block_given?
  fields ||= all_graphql_fields_for(mutation_field.type.to_graphql)

  query = <<~MUTATION
    mutation(#{input_variable_name}: #{mutation_field.arguments['input'].type.to_graphql}) {
      #{mutation_name}(input: #{input_variable_name}) {
        #{fields}
      }
    }
  MUTATION
  variables = variables_for_mutation(name, input)

  MutationDefinition.new(query, variables)
end
|
|
|
|
|
|
|
|
# Builds the variables payload for a mutation; returned as JSON unless the
# input contains IO-like values (file uploads) that cannot be serialized.
def variables_for_mutation(name, input)
  prepared = prepare_input_for_mutation(input)
  variables = { input_variable_name_for_mutation(name) => prepared }

  # Avoid trying to serialize multipart data into JSON
  return variables if prepared.values.any? { |value| io_value?(value) }

  variables.to_json
end
|
|
|
|
|
2019-09-30 21:07:59 +05:30
|
|
|
# Recursively converts a Hash with Ruby-style keys to GraphQL fieldname-style keys.
#
#   prepare_input_for_mutation({ 'my_key' => 1 })
#   # => { 'myKey' => 1 }
def prepare_input_for_mutation(input)
  input.each_with_object({}) do |(key, value), prepared|
    value = prepare_input_for_mutation(value) if value.is_a?(Hash)

    prepared[GraphqlHelpers.fieldnamerize(key)] = value
  end
end
|
|
|
|
|
2018-11-18 11:00:15 +05:30
|
|
|
# Derives a mutation's input variable name from its input argument type.
def input_variable_name_for_mutation(mutation_name)
  field = GitlabSchema.mutation.fields[GraphqlHelpers.fieldnamerize(mutation_name)]
  input_type = field_type(field.arguments['input'])

  GraphqlHelpers.fieldnamerize(input_type)
end
|
|
|
|
|
2020-04-22 19:07:51 +05:30
|
|
|
# Renders a field name with an optional argument list.
# Hash attributes are rendered as GraphQL arguments; anything else is
# interpolated verbatim.
def field_with_params(name, attributes = {})
  rendered_name = GraphqlHelpers.fieldnamerize(name.to_s)
  return rendered_name if attributes.blank?

  rendered_args = attributes.is_a?(Hash) ? attributes_to_graphql(attributes) : attributes

  "#{rendered_name}(#{rendered_args})"
end
|
|
|
|
|
|
|
|
# Renders one field, its arguments, and its selection set as a query fragment.
def query_graphql_field(name, attributes = {}, fields = nil)
  selection = fields || all_graphql_fields_for(name.to_s.classify)

  <<~QUERY
    #{field_with_params(name, attributes)}
    #{wrap_fields(selection)}
  QUERY
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
# Normalizes a field list (symbols become GraphQL field names) and wraps
# it in braces. Returns nil when there is nothing to wrap.
def wrap_fields(fields)
  rendered = Array.wrap(fields).map do |field|
    field.is_a?(Symbol) ? GraphqlHelpers.fieldnamerize(field) : field
  end.join("\n")

  return unless rendered.present?

  <<~FIELDS
  {
    #{rendered}
  }
  FIELDS
end
|
|
|
|
|
2020-04-22 19:07:51 +05:30
|
|
|
# Builds a newline-separated selection of every queryable field of the named
# type, recursing into nested fields up to `max_depth` levels. Fields that
# require arguments, names in `excluded`, and mutually-recursive types are
# skipped. Returns "" when the type is unknown, nil when max_depth runs out.
def all_graphql_fields_for(class_name, parent_types = Set.new, max_depth: 3, excluded: [])
  # pulling _all_ fields can generate a _huge_ query (like complexity 180,000),
  # and significantly increase spec runtime. so limit the depth by default
  return if max_depth <= 0

  # These stubs keep the generated mega-query within the schema's limits
  allow_unlimited_graphql_complexity
  allow_unlimited_graphql_depth
  allow_high_graphql_recursion
  allow_high_graphql_transaction_threshold

  type = GitlabSchema.types[class_name.to_s]
  return "" unless type

  type.fields.map do |name, field|
    # We can't guess arguments, so skip fields that require them
    next if required_arguments?(field)
    next if excluded.include?(name)

    singular_field_type = field_type(field)

    # If field type is the same as parent type, then we're hitting into
    # mutual dependency. Break it from infinite recursion
    next if parent_types.include?(singular_field_type)

    if nested_fields?(field)
      fields =
        all_graphql_fields_for(singular_field_type, parent_types | [type], max_depth: max_depth - 1)

      "#{name} { #{fields} }" unless fields.blank?
    else
      name
    end
  end.compact.join("\n")
end
|
|
|
|
|
|
|
|
# Renders a hash of attributes as a comma-separated GraphQL argument list.
def attributes_to_graphql(attributes)
  attributes
    .map { |key, value| "#{GraphqlHelpers.fieldnamerize(key.to_s)}: #{as_graphql_literal(value)}" }
    .join(", ")
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
# Fairly dumb Ruby => GraphQL rendering function. Only suitable for testing.
# Use symbol for Enum values (rendered bare, without quotes).
def as_graphql_literal(value)
  case value
  when Array then "[#{value.map { |v| as_graphql_literal(v) }.join(',')}]"
  when Hash then "{#{attributes_to_graphql(value)}}"
  when Integer, Float then value.to_s
  # Escape embedded double quotes so the literal stays parseable
  when String then "\"#{value.gsub(/"/, '\\"')}\""
  when Symbol then value
  when nil then 'null'
  when true then 'true'
  when false then 'false'
  else raise ArgumentError, "Cannot represent #{value} as GraphQL literal"
  end
end
|
|
|
|
|
2019-09-04 21:01:54 +05:30
|
|
|
# POSTs several queries at once to the GraphQL endpoint (multiplexed request).
def post_multiplex(queries, current_user: nil, headers: {})
  endpoint = api('/', current_user, version: 'graphql')

  post endpoint, params: { _json: queries }, headers: headers
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
# POSTs a single query (with optional variables) to the GraphQL endpoint.
def post_graphql(query, current_user: nil, variables: nil, headers: {})
  endpoint = api('/', current_user, version: 'graphql')

  post endpoint,
       params: { query: query, variables: variables&.to_json },
       headers: headers
end
|
|
|
|
|
|
|
|
# POSTs a MutationDefinition built by #graphql_mutation.
def post_graphql_mutation(mutation, current_user: nil)
  post_graphql(
    mutation.query,
    current_user: current_user,
    variables: mutation.variables
  )
end
|
|
|
|
|
2020-11-24 15:15:51 +05:30
|
|
|
# POSTs a mutation containing file uploads, using the multipart protocol
# understood by Workhorse.
def post_graphql_mutation_with_uploads(mutation, current_user: nil)
  upload_paths = file_paths_in_mutation(mutation)
  params = mutation_to_apollo_uploads_param(mutation, files: upload_paths)

  workhorse_post_with_file(
    api('/', current_user, version: 'graphql'),
    params: params,
    file_key: '1'
  )
end
|
|
|
|
|
|
|
|
# Collects the variable dig-paths of every uploaded file in a mutation.
def file_paths_in_mutation(mutation)
  [].tap do |paths|
    find_uploads(paths, [], mutation.variables)
  end
end
|
|
|
|
|
|
|
|
# Depth-first search for Rack::Test::UploadedFile values, accumulating the
# dig-path of each file found into `paths`.
def find_uploads(paths, path, value)
  if value.is_a?(Rack::Test::UploadedFile)
    paths << path
  elsif value.is_a?(Hash)
    value.each { |key, nested| find_uploads(paths, path + [key], nested) }
  elsif value.is_a?(Array)
    value.each_with_index { |nested, index| find_uploads(paths, path + [index], nested) }
  end
end
|
|
|
|
|
2019-12-04 20:38:33 +05:30
|
|
|
# Implements GraphQL multipart request v2:
# https://github.com/jaydenseric/graphql-multipart-request-spec/tree/v2.0.0-alpha.2
# Simplified: file deduplication is not supported.
#
# Each file at the given variable paths is pulled out of the variables
# (replaced with null) and attached under a numeric key; `map` records
# where each attachment belongs.
def mutation_to_apollo_uploads_param(mutation, files: [])
  operations = { 'query' => mutation.query, 'variables' => mutation.variables }
  map = {}
  extracted_files = {}

  files.each_with_index do |file_path, index|
    apollo_key = (index + 1).to_s
    *parent_path, file_key = file_path

    container = operations['variables']
    container = container.dig(*parent_path) unless parent_path.empty?

    extracted_files[apollo_key] = container[file_key]
    container[file_key] = nil

    map[apollo_key] = ["variables.#{file_path.join('.')}"]
  end

  { operations: operations.to_json, map: map.to_json }.merge(extracted_files)
end
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
# Returns the 'data' portion of the response; raises NoData (carrying any
# errors) when none is present.
#
# Note that `json_response` is defined as `let(:json_response)` and
# therefore, in a spec with multiple queries, will only contain data
# from the _first_ query, not subsequent ones
def graphql_data
  data = json_response['data']
  raise NoData, graphql_errors unless data

  data
end
|
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
# Digs into the response data along the given (Ruby-style) path.
def graphql_data_at(*path)
  graphql_dig_at(graphql_data, *path)
end
|
|
|
|
|
|
|
|
# Digs into a GraphQL response, fieldnamerizing non-integer path segments.
# Integer segments index into arrays, allowing paths like
# ['project', 'boards', 'edges', 0, 'node', 'lists'].
def graphql_dig_at(data, *path)
  keys = path.map { |part| part.is_a?(Integer) ? part : GraphqlHelpers.fieldnamerize(part) }

  keys.inject(data) do |acc, key|
    acc.is_a?(Array) ? acc[key] : acc&.dig(key)
  end
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
# Extracts the errors from the response, handling both regular (Hash) and
# multiplexed (Array) responses.
def graphql_errors
  if json_response.is_a?(Hash) # regular query
    json_response['errors']
  elsif json_response.is_a?(Array) # multiplexed queries
    json_response.map { |response| response['errors'] }
  else
    raise "Unknown GraphQL response type #{json_response.class}"
  end
end
|
|
|
|
|
2019-10-31 01:37:42 +05:30
|
|
|
# Asserts that every given regex matches at least one error message.
def expect_graphql_errors_to_include(regexes_to_match)
  raise "No errors. Was expecting to match #{regexes_to_match}" if graphql_errors.nil? || graphql_errors.empty?

  messages = flattened_errors.map { |error| error["message"] }

  Array.wrap(regexes_to_match).flatten.each do |regex|
    expect(messages).to include a_string_matching regex
  end
end
|
|
|
|
|
|
|
|
# Asserts the response carried no errors at all.
def expect_graphql_errors_to_be_empty
  expect(flattened_errors).to be_empty
end
|
|
|
|
|
|
|
|
# All errors as one flat, nil-free array (covers multiplexed responses too).
def flattened_errors
  errors = Array.wrap(graphql_errors)

  errors.flatten.compact
end
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
# Fetches the named mutation's payload from the response data.
# Raises if no response is found (NoData, or KeyError via #fetch).
def graphql_mutation_response(mutation_name)
  key = GraphqlHelpers.fieldnamerize(mutation_name)

  graphql_data.fetch(key)
end
|
|
|
|
|
2020-06-23 00:09:42 +05:30
|
|
|
# Names of the type's scalar/enum fields that require no arguments.
def scalar_fields_of(type_name)
  GitlabSchema.types[type_name].fields.each_with_object([]) do |(name, field), names|
    names << name unless nested_fields?(field) || required_arguments?(field)
  end
end
|
|
|
|
|
|
|
|
# [name, field] pairs for the type's nested fields that require no arguments.
def nested_fields_of(type_name)
  GitlabSchema.types[type_name].fields.each_with_object([]) do |(name, field), pairs|
    pairs << [name, field] if nested_fields?(field) && !required_arguments?(field)
  end
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
# A field is "nested" when it is neither a scalar nor an enum.
def nested_fields?(field)
  !(scalar?(field) || enum?(field))
end
|
|
|
|
|
|
|
|
# True when the field's unwrapped type is a scalar.
def scalar?(field)
  kind = field_type(field).kind

  kind.scalar?
end
|
|
|
|
|
|
|
|
# True when the field's unwrapped type is an enum.
def enum?(field)
  kind = field_type(field).kind

  kind.enum?
end
|
|
|
|
|
|
|
|
# True when any of the field's arguments is non-null (i.e. required).
def required_arguments?(field)
  field.arguments.each_value.any? { |argument| argument.type.non_null? }
end
|
|
|
|
|
2019-07-31 22:56:46 +05:30
|
|
|
# True when the value (or any element of it) is IO-like, e.g. a file upload.
def io_value?(value)
  Array.wrap(value).any? { |item| item.respond_to?(:to_io) }
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
# Returns the innermost (unwrapped) type of a field.
#
# The type could be nested. For example `[GraphQL::STRING_TYPE]`:
# - List
#   - String!
#     - String
def field_type(field)
  type = field.type.respond_to?(:to_graphql) ? field.type.to_graphql : field.type

  # Strip List/NonNull wrappers until we reach the innermost type
  type = type.of_type while type.respond_to?(:of_type)

  type
end
|
2019-07-07 11:18:12 +05:30
|
|
|
|
|
|
|
# For most tests, we want to allow unlimited complexity.
# Stubs both the instance-level and class-level complexity limits to nil.
def allow_unlimited_graphql_complexity
  allow_any_instance_of(GitlabSchema).to receive(:max_complexity).and_return nil
  allow(GitlabSchema).to receive(:max_query_complexity).with(any_args).and_return nil
end
|
2019-09-04 21:01:54 +05:30
|
|
|
|
|
|
|
# Lifts the schema's query depth limit for the duration of a spec.
def allow_unlimited_graphql_depth
  allow_any_instance_of(GitlabSchema).to receive(:max_depth).and_return nil
  allow(GitlabSchema).to receive(:max_query_depth).with(any_args).and_return nil
end
|
2019-10-31 01:37:42 +05:30
|
|
|
|
|
|
|
# Raises the recursion analyzer's threshold so deep generated queries pass.
def allow_high_graphql_recursion
  allow_any_instance_of(Gitlab::Graphql::QueryAnalyzers::RecursionAnalyzer).to receive(:recursion_threshold).and_return 1000
end
|
2019-12-26 22:10:19 +05:30
|
|
|
|
2020-03-13 15:44:24 +05:30
|
|
|
# Raises the query-limiting transaction threshold so query-heavy generated
# requests do not trip the limiter.
def allow_high_graphql_transaction_threshold
  stub_const("Gitlab::QueryLimiting::Transaction::THRESHOLD", 1000)
end
|
|
|
|
|
2019-12-26 22:10:19 +05:30
|
|
|
# Unwraps connection `edges` entries to their nodes, optionally plucking a
# single attribute from each node.
def node_array(data, extract_attribute = nil)
  data.map do |entry|
    node = entry['node']

    extract_attribute ? node[extract_attribute] : node
  end
end
|
2020-01-01 13:55:28 +05:30
|
|
|
|
|
|
|
# The string form of a model's GlobalID.
def global_id_of(model)
  model.to_global_id.to_s
end
|
2020-03-13 15:44:24 +05:30
|
|
|
|
|
|
|
# RSpec matcher for the error produced when a required argument is omitted.
def missing_required_argument(path, argument)
  a_hash_including(
    'path' => ['query', *path],
    'extensions' => a_hash_including('code' => 'missingRequiredArguments', 'arguments' => argument.to_s)
  )
end
|
|
|
|
|
|
|
|
# RSpec matcher for an error with the given path and message.
def custom_graphql_error(path, msg)
  a_hash_including('message' => msg, 'path' => path)
end
|
|
|
|
|
|
|
|
# Builds an anonymous GraphQL object type ('TestType') with a single nullable
# `name` field; yields the class so callers can add more fields.
def type_factory
  Class.new(Types::BaseObject) do
    graphql_name 'TestType'

    field :name, GraphQL::STRING_TYPE, null: true

    yield(self) if block_given?
  end
end
|
|
|
|
|
|
|
|
# Builds an anonymous GraphQL query root type ('TestQuery'); yields the class
# so callers can add fields.
def query_factory
  Class.new(Types::BaseObject) do
    graphql_name 'TestQuery'

    yield(self) if block_given?
  end
end
|
|
|
|
|
|
|
|
# Builds a minimal schema around the given query root type and executes
# `query_string` as `user` (both provided by the surrounding spec context).
def execute_query(query_type)
  schema = Class.new(GraphQL::Schema) do
    use GraphQL::Pagination::Connections
    use Gitlab::Graphql::Authorize
    use Gitlab::Graphql::Pagination::Connections

    query(query_type)
  end

  schema.execute(
    query_string,
    context: { current_user: user },
    variables: {}
  )
end
|
2018-11-08 19:23:39 +05:30
|
|
|
end
|
2019-09-04 21:01:54 +05:30
|
|
|
|
|
|
|
# This warms our schema, doing this as part of loading the helpers to avoid
|
|
|
|
# duplicate loading error when Rails tries autoload the types.
|
|
|
|
GitlabSchema.graphql_definition
|