# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'Query.jobs', feature_category: :continuous_integration do
  include GraphqlHelpers

  let_it_be(:admin) { create(:admin) }
  let_it_be(:project) { create(:project, :repository, :public) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
  let_it_be(:runner) { create(:ci_runner) }

  let_it_be(:build) do
    create(:ci_build, pipeline: pipeline, name: 'my test job', ref: 'HEAD', tag_list: %w[tag1 tag2], runner: runner)
  end

  let(:query) do
    %(
      query {
        jobs {
          nodes {
            id
            #{fields.join(' ')}
          }
        }
      }
    )
  end

  let(:jobs_graphql_data) { graphql_data_at(:jobs, :nodes) }

  let(:fields) do
    %w[commitPath refPath webPath browseArtifactsPath playPath tags runner{id}]
  end

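  # Illustration (added for clarity, not part of the original spec): with the
  # default `fields` above, the document posted by these examples is roughly:
  #
  #   query {
  #     jobs {
  #       nodes {
  #         id
  #         commitPath refPath webPath browseArtifactsPath playPath tags runner{id}
  #       }
  #     }
  #   }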
  it 'returns the paths in each job of a pipeline' do
    post_graphql(query, current_user: admin)

    expect(jobs_graphql_data).to contain_exactly(
      a_graphql_entity_for(
        build,
        commit_path: "/#{project.full_path}/-/commit/#{build.sha}",
        ref_path: "/#{project.full_path}/-/commits/HEAD",
        web_path: "/#{project.full_path}/-/jobs/#{build.id}",
        browse_artifacts_path: "/#{project.full_path}/-/jobs/#{build.id}/artifacts/browse",
        play_path: "/#{project.full_path}/-/jobs/#{build.id}/play",
        tags: build.tag_list,
        runner: a_graphql_entity_for(runner)
      )
    )
  end

  context 'when requesting individual fields' do
    using RSpec::Parameterized::TableSyntax

    let_it_be(:admin2) { create(:admin) }
    let_it_be(:project2) { create(:project) }
    let_it_be(:pipeline2) { create(:ci_pipeline, project: project2) }

    where(:field) { fields }

    with_them do
      let(:fields) do
        [field]
      end

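      # N+1 guard (comment added for clarity, not part of the original spec):
      # warm the caches with one request, record a baseline query count with
      # ActiveRecord::QueryRecorder, create an extra build, then assert that
      # repeating the request does not exceed the recorded baseline.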
      it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
        # warm-up cache and so on:
        args = { current_user: admin }
        args2 = { current_user: admin2 }
        post_graphql(query, **args2)

        control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
          post_graphql(query, **args)
        end

        create(:ci_build, pipeline: pipeline2, name: 'my test job2', ref: 'HEAD', tag_list: %w[tag3])

        post_graphql(query, **args)

        expect { post_graphql(query, **args) }.not_to exceed_all_query_limit(control)
      end
    end
  end
end

RSpec.describe 'Query.jobs.runner', feature_category: :continuous_integration do
  include GraphqlHelpers

  let_it_be(:admin) { create(:admin) }

  let(:jobs_runner_graphql_data) { graphql_data_at(:jobs, :nodes, :runner) }

  let(:query) do
    %(
      query {
        jobs {
          nodes {
            runner {
              id
              adminUrl
              description
            }
          }
        }
      }
    )
  end

  context 'when job has no runner' do
    let_it_be(:build) { create(:ci_build) }

    it 'returns nil' do
      post_graphql(query, current_user: admin)

      expect(jobs_runner_graphql_data).to eq([nil])
    end
  end

  context 'when job has runner' do
    let_it_be(:runner) { create(:ci_runner) }
    let_it_be(:build_with_runner) { create(:ci_build, runner: runner) }

    it 'returns runner attributes' do
      post_graphql(query, current_user: admin)

      expect(jobs_runner_graphql_data).to contain_exactly(
        a_graphql_entity_for(runner, :description, 'adminUrl' => "http://localhost/admin/runners/#{runner.id}")
      )
    end
  end
end


RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integration do
  include GraphqlHelpers

  let_it_be(:project) { create(:project, :repository, :public) }
  let_it_be(:user) { create(:user) }

  def all(*fields)
    fields.flat_map { |f| [f, :nodes] }
  end

  describe '.stages.groups.jobs' do
    let(:pipeline) do
      pipeline = create(:ci_pipeline, project: project, user: user)
      stage = create(:ci_stage, project: project, pipeline: pipeline, name: 'first', position: 1)
      create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'my test job', scheduling_type: :stage)

      pipeline
    end

    let(:jobs_graphql_data) { graphql_data_at(:project, :pipeline, *all(:stages, :groups, :jobs)) }

    let(:first_n) { var('Int') }

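    # Illustration (added for clarity, not part of the original spec): the
    # GraphqlHelpers below (`with_signature`, `wrap_fields`, `query_graphql_path`)
    # are assumed to compose a document roughly equivalent to:
    #
    #   query($first: Int) {
    #     project(fullPath: "...") {
    #       pipeline(iid: "...") {
    #         stages(first: $first) { #{stage_fields} }
    #       }
    #     }
    #   }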
    let(:query) do
      with_signature([first_n], wrap_fields(query_graphql_path(
        [
          [:project, { full_path: project.full_path }],
          [:pipeline, { iid: pipeline.iid.to_s }],
          [:stages, { first: first_n }]
        ], stage_fields)))
    end

    let(:stage_fields) do
      <<~FIELDS
        nodes {
          name
          groups {
            nodes {
              detailedStatus {
                id
              }
              name
              jobs {
                nodes {
                  downstreamPipeline {
                    id
                    path
                  }
                  name
                  needs {
                    nodes { #{all_graphql_fields_for('CiBuildNeed')} }
                  }
                  previousStageJobsOrNeeds {
                    nodes {
                      ... on CiBuildNeed {
                        #{all_graphql_fields_for('CiBuildNeed')}
                      }
                      ... on CiJob {
                        #{all_graphql_fields_for('CiJob')}
                      }
                    }
                  }
                  detailedStatus {
                    id
                  }
                  pipeline {
                    id
                  }
                }
              }
            }
          }
        }
      FIELDS
    end

    it 'returns the jobs of a pipeline stage' do
      post_graphql(query, current_user: user)

      expect(jobs_graphql_data).to contain_exactly(a_hash_including('name' => 'my test job'))
    end

    context 'when there is more than one stage and job needs' do
      before do
        build_stage = create(:ci_stage, position: 2, name: 'build', project: project, pipeline: pipeline)
        test_stage = create(:ci_stage, position: 3, name: 'test', project: project, pipeline: pipeline)

        create(:ci_build, pipeline: pipeline, name: 'docker 1 2', scheduling_type: :stage, ci_stage: build_stage, stage_idx: build_stage.position)
        create(:ci_build, pipeline: pipeline, name: 'docker 2 2', ci_stage: build_stage, stage_idx: build_stage.position, scheduling_type: :dag)
        create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', scheduling_type: :stage, ci_stage: test_stage, stage_idx: test_stage.position)
        test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', scheduling_type: :dag, ci_stage: test_stage, stage_idx: test_stage.position)

        create(:ci_build_need, build: test_job, name: 'my test job')
      end

      it 'reports the build needs and execution requirements' do
        post_graphql(query, current_user: user)

        expect(jobs_graphql_data).to contain_exactly(
          a_hash_including(
            'name' => 'my test job',
            'needs' => { 'nodes' => [] },
            'previousStageJobsOrNeeds' => { 'nodes' => [] }
          ),
          a_hash_including(
            'name' => 'docker 1 2',
            'needs' => { 'nodes' => [] },
            'previousStageJobsOrNeeds' => { 'nodes' => [
              a_hash_including('name' => 'my test job')
            ] }
          ),
          a_hash_including(
            'name' => 'docker 2 2',
            'needs' => { 'nodes' => [] },
            'previousStageJobsOrNeeds' => { 'nodes' => [] }
          ),
          a_hash_including(
            'name' => 'rspec 1 2',
            'needs' => { 'nodes' => [] },
            'previousStageJobsOrNeeds' => { 'nodes' => [
              a_hash_including('name' => 'docker 1 2'),
              a_hash_including('name' => 'docker 2 2')
            ] }
          ),
          a_hash_including(
            'name' => 'rspec 2 2',
            'needs' => { 'nodes' => [a_hash_including('name' => 'my test job')] },
            'previousStageJobsOrNeeds' => { 'nodes' => [
              a_hash_including('name' => 'my test job')
            ] }
          )
        )
      end

      it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
        create(:ci_bridge, name: 'bridge-1', pipeline: pipeline, downstream_pipeline: create(:ci_pipeline))

        post_graphql(query, current_user: user)

        control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
          post_graphql(query, current_user: user)
        end

        create(:ci_build, name: 'test-a', pipeline: pipeline)
        create(:ci_build, name: 'test-b', pipeline: pipeline)
        create(:ci_bridge, name: 'bridge-2', pipeline: pipeline, downstream_pipeline: create(:ci_pipeline))
        create(:ci_bridge, name: 'bridge-3', pipeline: pipeline, downstream_pipeline: create(:ci_pipeline))

        expect do
          post_graphql(query, current_user: user)
        end.not_to exceed_all_query_limit(control)
      end
    end
  end

  describe '.jobs.kind' do
    let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

    let(:query) do
      %(
        query {
          project(fullPath: "#{project.full_path}") {
            pipeline(iid: "#{pipeline.iid}") {
              stages {
                nodes {
                  groups {
                    nodes {
                      jobs {
                        nodes {
                          kind
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      )
    end

    context 'when the job is a build' do
      it 'returns BUILD' do
        create(:ci_build, pipeline: pipeline)

        post_graphql(query, current_user: user)

        job_data = graphql_data_at(:project, :pipeline, :stages, :nodes, :groups, :nodes, :jobs, :nodes).first
        expect(job_data['kind']).to eq 'BUILD'
      end
    end

    context 'when the job is a bridge' do
      it 'returns BRIDGE' do
        create(:ci_bridge, pipeline: pipeline)

        post_graphql(query, current_user: user)

        job_data = graphql_data_at(:project, :pipeline, :stages, :nodes, :groups, :nodes, :jobs, :nodes).first
        expect(job_data['kind']).to eq 'BRIDGE'
      end
    end
  end

  describe '.jobs.artifacts' do
    let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

    let(:query) do
      %(
        query {
          project(fullPath: "#{project.full_path}") {
            pipeline(iid: "#{pipeline.iid}") {
              stages {
                nodes {
                  groups {
                    nodes {
                      jobs {
                        nodes {
                          artifacts {
                            nodes {
                              downloadPath
                            }
                          }
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      )
    end

    context 'when the job is a build' do
      it "returns the build's artifacts" do
        create(:ci_build, :artifacts, pipeline: pipeline)

        post_graphql(query, current_user: user)

        job_data = graphql_data_at(:project, :pipeline, :stages, :nodes, :groups, :nodes, :jobs, :nodes).first
        expect(job_data.dig('artifacts', 'nodes').count).to be(2)
      end
    end

    context 'when the job is not a build' do
      it 'returns nil' do
        create(:ci_bridge, pipeline: pipeline)

        post_graphql(query, current_user: user)

        job_data = graphql_data_at(:project, :pipeline, :stages, :nodes, :groups, :nodes, :jobs, :nodes).first
        expect(job_data['artifacts']).to be_nil
      end
    end
  end

  describe '.jobs.runnerManager' do
    let_it_be(:admin) { create(:admin) }
    let_it_be(:runner_manager) { create(:ci_runner_machine, created_at: Time.current, contacted_at: Time.current) }
    let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

    let_it_be(:build) do
      create(:ci_build, pipeline: pipeline, name: 'my test job', runner_manager: runner_manager)
    end

    let(:query) do
      %(
        query {
          project(fullPath: "#{project.full_path}") {
            pipeline(iid: "#{pipeline.iid}") {
              jobs {
                nodes {
                  id
                  name
                  runnerManager {
                    #{all_graphql_fields_for('CiRunnerManager', excluded: [:runner], max_depth: 1)}
                  }
                }
              }
            }
          }
        }
      )
    end

    let(:jobs_graphql_data) { graphql_data_at(:project, :pipeline, :jobs, :nodes) }

    it 'returns the runner manager in each job of a pipeline' do
      post_graphql(query, current_user: admin)

      expect(jobs_graphql_data).to contain_exactly(
        a_graphql_entity_for(
          build,
          name: build.name,
          runner_manager: a_graphql_entity_for(
            runner_manager,
            system_id: runner_manager.system_xid,
            created_at: runner_manager.created_at.iso8601,
            contacted_at: runner_manager.contacted_at.iso8601,
            status: runner_manager.status.to_s.upcase
          )
        )
      )
    end

    it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
      admin2 = create(:admin)

      control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
        post_graphql(query, current_user: admin)
      end

      runner_manager2 = create(:ci_runner_machine)
      create(:ci_build, pipeline: pipeline, name: 'my test job2', runner_manager: runner_manager2)

      expect { post_graphql(query, current_user: admin2) }.not_to exceed_all_query_limit(control)
    end
  end

  describe '.jobs.count' do
    let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
    let_it_be(:successful_job) { create(:ci_build, :success, pipeline: pipeline) }
    let_it_be(:pending_job) { create(:ci_build, :pending, pipeline: pipeline) }
    let_it_be(:failed_job) { create(:ci_build, :failed, pipeline: pipeline) }

    let(:query) do
      %(
        query {
          project(fullPath: "#{project.full_path}") {
            pipeline(iid: "#{pipeline.iid}") {
              jobs {
                count
              }
            }
          }
        }
      )
    end

    before do
      post_graphql(query, current_user: user)
    end

    it 'returns the number of jobs' do
      expect(graphql_data_at(:project, :pipeline, :jobs, :count)).to eq(3)
    end

    context 'with limit value' do
      let(:limit) { 1 }

      let(:query) do
        %(
          query {
            project(fullPath: "#{project.full_path}") {
              pipeline(iid: "#{pipeline.iid}") {
                jobs {
                  count(limit: #{limit})
                }
              }
            }
          }
        )
      end

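      # Note (added for clarity, not part of the original spec): with three jobs
      # and limit: 1 the field returns 2, which suggests the limited count stops
      # counting at limit + 1 so callers can detect "more than limit".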
      it 'returns a limited number of jobs' do
        expect(graphql_data_at(:project, :pipeline, :jobs, :count)).to eq(2)
      end

      context 'with invalid value' do
        let(:limit) { 1500 }

        it 'returns a validation error' do
          expect(graphql_errors).to include(a_hash_including('message' => 'limit must be less than or equal to 1000'))
        end
      end
    end

    context 'with jobs filter' do
      let(:query) do
        %(
          query {
            project(fullPath: "#{project.full_path}") {
              jobs(statuses: FAILED) {
                count
              }
            }
          }
        )
      end

      it 'returns the number of failed jobs' do
        expect(graphql_data_at(:project, :jobs, :count)).to eq(1)
      end
    end
  end

  context 'when querying jobs for multiple projects' do
    let(:query) do
      %(
        query {
          projects {
            nodes {
              jobs {
                nodes {
                  name
                }
              }
            }
          }
        }
      )
    end

    before do
      create_list(:project, 2).each do |project|
        project.add_developer(user)
        create(:ci_build, project: project)
      end
    end

    it 'returns an error' do
      post_graphql(query, current_user: user)

      expect_graphql_errors_to_include [/"jobs" field can be requested only for 1 Project\(s\) at a time./]
    end
  end

  context 'when batched querying jobs for multiple projects' do
    let(:batched) do
      [
        { query: query_1 },
        { query: query_2 }
      ]
    end

    let(:query_1) do
      %(
        query Page1 {
          projects {
            nodes {
              jobs {
                nodes {
                  name
                }
              }
            }
          }
        }
      )
    end

    let(:query_2) do
      %(
        query Page2 {
          projects {
            nodes {
              jobs {
                nodes {
                  name
                }
              }
            }
          }
        }
      )
    end

    before do
      create_list(:project, 2).each do |project|
        project.add_developer(user)
        create(:ci_build, project: project)
      end
    end

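    # Note (added for clarity, not part of the original spec): each query in the
    # batch is expected to resolve `jobs` for the first project only and to report
    # the single-project limit error for the rest, i.e. the limit applies per
    # query rather than being shared across the multiplexed request.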
    it 'limits the specific field evaluation per query' do
      get_multiplex(batched, current_user: user)

      resp = json_response

      expect(resp.first.dig('data', 'projects', 'nodes').first.dig('jobs', 'nodes').first['name']).to eq('test')
      expect(resp.first['errors'].first['message'])
        .to match(/"jobs" field can be requested only for 1 Project\(s\) at a time./)
      expect(resp.second.dig('data', 'projects', 'nodes').first.dig('jobs', 'nodes').first['name']).to eq('test')
      expect(resp.second['errors'].first['message'])
        .to match(/"jobs" field can be requested only for 1 Project\(s\) at a time./)
    end
  end
end