diff --git a/app/graphql/resolvers/ci/job_analytics_resolver.rb b/app/graphql/resolvers/ci/job_analytics_resolver.rb
index 33e0b74fe7bd0c9dbe77f3221351d529802333e3..be074802ea4e188d7776e9b27d3337379ee5ee9e 100644
--- a/app/graphql/resolvers/ci/job_analytics_resolver.rb
+++ b/app/graphql/resolvers/ci/job_analytics_resolver.rb
@@ -8,17 +8,7 @@ class JobAnalyticsResolver < BaseResolver
authorizes_object!
authorize :read_ci_cd_analytics
- argument :select_fields,
- [Types::Ci::JobAnalyticsFieldEnum],
- required: true,
- default_value: [:name],
- description: 'Fields to select and group by.'
-
- argument :aggregations,
- [Types::Ci::JobAnalyticsAggregationEnum],
- required: true,
- default_value: [:mean_duration_in_seconds, :rate_of_failed, :p95_duration_in_seconds],
- description: 'Aggregation functions to apply.'
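+      # Select fields and aggregations are now derived from the query's
+      # selection set via lookahead rather than passed as explicit arguments.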
+ extras [:lookahead]
argument :name_search,
GraphQL::Types::String,
@@ -48,7 +38,11 @@ class JobAnalyticsResolver < BaseResolver
description:
'End of the requested time (in UTC). Defaults to the pipelines started before the current date.'
- def resolve(**args)
+ def resolve(lookahead:, **args)
+ nodes = node_selection(lookahead)
+ args[:select_fields] = detect_select_fields(nodes)
+ args[:aggregations] = detect_aggregations(nodes, args[:sort])
+
query_builder = ::Ci::JobAnalytics::QueryBuilder.new(
project: project,
current_user: current_user,
@@ -67,6 +61,60 @@ def resolve(**args)
def project
object.respond_to?(:sync) ? object.sync : object
end
+
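+      # Maps the selected GraphQL fields to allowed ClickHouse columns,
+      # falling back to grouping by job name when nothing usable is selected.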
+ def detect_select_fields(nodes)
+ return [:name] unless nodes&.selected?
+
+        # The GraphQL field is `stage`; map it to the `stage_id` column used for
+        # grouping (previously handled by the removed field enum).
+        fields = nodes.selections.map { |s| s.name == :stage ? :stage_id : s.name } &
+          ClickHouse::Finders::Ci::FinishedBuildsFinder::ALLOWED_TO_SELECT
+
+ fields.empty? ? [:name] : fields
+ end
+
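+      # Combines the aggregation implied by the sort argument with those
+      # implied by the selected statistics sub-fields, limited to the
+      # finder's allowed aggregations.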
+ def detect_aggregations(nodes, sort = nil)
+ aggregations = extract_sort_aggregations(sort)
+
+ return aggregations unless nodes&.selected? && nodes.selects?(:statistics)
+
+ statistics = nodes.selection(:statistics)
+ aggregations
+ .concat(extract_duration_aggregations(statistics))
+ .concat(extract_status_aggregations(statistics)) &
+ ClickHouse::Finders::Ci::FinishedBuildsFinder::ALLOWED_AGGREGATIONS
+ end
+
+ def extract_duration_aggregations(statistics)
+ return [] unless statistics.selects?(:duration_statistics)
+
+ statistics.selection(:duration_statistics).selections.map(&:name)
+ end
+
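+      # Maps count/rate selections and their status argument to finder
+      # aggregations, for example count(status: failed) -> :count_failed.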
+ def extract_status_aggregations(statistics)
+ statistics.selections.filter_map do |s|
+ status = s.arguments[:status]
+ next if status == :other # not supported yet
+
+ case s.name
+ when :count
+ status.nil? || status == :any ? :total_count : :"count_#{status}"
+ when :rate
+ :"rate_of_#{status}" unless status.nil? || status == :any
+ end
+ end
+ end
+
+ # Sorting by an aggregation field requires that field to be included in the aggregations.
+ # The 'name' field is always selected by default, so sorting by name needs no additional aggregation.
+ def extract_sort_aggregations(sort)
+ return [] if sort.nil? || sort.start_with?('name_')
+
+ [sort.to_s.sub(/_(?:asc|desc)$/, '').to_sym]
+ end
+
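+      # Unwraps the connection selection (edges { node } or nodes) to reach
+      # the per-item field selections.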
+ def node_selection(lookahead)
+ return lookahead unless lookahead&.selected?
+
+ lookahead.selects?(:edges) ? lookahead.selection(:edges).selection(:node) : lookahead.selection(:nodes)
+ end
end
end
end
diff --git a/app/graphql/types/ci/job_analytics_aggregation_enum.rb b/app/graphql/types/ci/job_analytics_aggregation_enum.rb
deleted file mode 100644
index 7a5938ab01b7a42bdcc3a1db89e26cdbb72e1f23..0000000000000000000000000000000000000000
--- a/app/graphql/types/ci/job_analytics_aggregation_enum.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-module Types
- module Ci
- class JobAnalyticsAggregationEnum < BaseEnum
- graphql_name 'CiJobAnalyticsAggregation'
- description 'Aggregation functions available for CI/CD job analytics'
-
- value 'MEAN_DURATION_IN_SECONDS',
- value: :mean_duration_in_seconds,
- description: 'Average duration of jobs in seconds.'
-
- value 'P95_DURATION_IN_SECONDS',
- value: :p95_duration_in_seconds,
- description: '95th percentile duration of jobs in seconds.'
-
- value 'RATE_OF_SUCCESS',
- value: :rate_of_success,
- description: 'Percentage of successful jobs.'
-
- value 'RATE_OF_FAILED',
- value: :rate_of_failed,
- description: 'Percentage of failed jobs.'
-
- value 'RATE_OF_CANCELED',
- value: :rate_of_canceled,
- description: 'Percentage of canceled jobs.'
- end
- end
-end
diff --git a/app/graphql/types/ci/job_analytics_field_enum.rb b/app/graphql/types/ci/job_analytics_field_enum.rb
deleted file mode 100644
index 4d1b841cb989b6002a3047981103d396ea443a31..0000000000000000000000000000000000000000
--- a/app/graphql/types/ci/job_analytics_field_enum.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Types
- module Ci
- class JobAnalyticsFieldEnum < BaseEnum
- graphql_name 'CiJobAnalyticsField'
- description 'Fields available for selection in CI/CD job analytics'
-
- value 'NAME', value: :name, description: 'Job name.'
- value 'STAGE', value: :stage_id, description: 'Stage.'
- end
- end
-end
diff --git a/app/graphql/types/ci/job_analytics_sort_enum.rb b/app/graphql/types/ci/job_analytics_sort_enum.rb
index 3e0080704d936b1c07c3aac6060dcb900e5b6e54..4665700ec4c3630edb543f868e22b0df70ce2ca4 100644
--- a/app/graphql/types/ci/job_analytics_sort_enum.rb
+++ b/app/graphql/types/ci/job_analytics_sort_enum.rb
@@ -11,19 +11,51 @@ class JobAnalyticsSortEnum < BaseEnum
value 'MEAN_DURATION_ASC',
'Sort by mean duration in ascending order.',
- value: :mean_duration_in_seconds_asc
+ value: :mean_asc
value 'MEAN_DURATION_DESC',
'Sort by mean duration in descending order.',
- value: :mean_duration_in_seconds_desc
+ value: :mean_desc
+
+ value 'P50_DURATION_ASC',
+ 'Sort by 50th percentile duration in ascending order.',
+ value: :p50_asc
+
+ value 'P50_DURATION_DESC',
+ 'Sort by 50th percentile duration in descending order.',
+ value: :p50_desc
+
+ value 'P75_DURATION_ASC',
+ 'Sort by 75th percentile duration in ascending order.',
+ value: :p75_asc
+
+ value 'P75_DURATION_DESC',
+ 'Sort by 75th percentile duration in descending order.',
+ value: :p75_desc
+
+ value 'P90_DURATION_ASC',
+ 'Sort by 90th percentile duration in ascending order.',
+ value: :p90_asc
+
+ value 'P90_DURATION_DESC',
+ 'Sort by 90th percentile duration in descending order.',
+ value: :p90_desc
value 'P95_DURATION_ASC',
'Sort by 95th percentile duration in ascending order.',
- value: :p95_duration_in_seconds_asc
+ value: :p95_asc
value 'P95_DURATION_DESC',
'Sort by 95th percentile duration in descending order.',
- value: :p95_duration_in_seconds_desc
+ value: :p95_desc
+
+ value 'P99_DURATION_ASC',
+ 'Sort by 99th percentile duration in ascending order.',
+ value: :p99_asc
+
+ value 'P99_DURATION_DESC',
+ 'Sort by 99th percentile duration in descending order.',
+ value: :p99_desc
value 'SUCCESS_RATE_ASC',
'Sort by success rate in ascending order.',
@@ -48,6 +80,54 @@ class JobAnalyticsSortEnum < BaseEnum
value 'CANCELED_RATE_DESC',
'Sort by canceled rate in descending order.',
value: :rate_of_canceled_desc
+
+ value 'OTHER_RATE_ASC',
+ 'Sort by other (canceled or skipped) rate in ascending order.',
+ value: :rate_of_other_asc
+
+ value 'OTHER_RATE_DESC',
+ 'Sort by other (canceled or skipped) rate in descending order.',
+ value: :rate_of_other_desc
+
+ value 'COUNT_SUCCESS_ASC',
+ 'Sort by success count in ascending order.',
+ value: :count_success_asc
+
+ value 'COUNT_SUCCESS_DESC',
+ 'Sort by success count in descending order.',
+ value: :count_success_desc
+
+ value 'COUNT_FAILED_ASC',
+ 'Sort by failed count in ascending order.',
+ value: :count_failed_asc
+
+ value 'COUNT_FAILED_DESC',
+ 'Sort by failed count in descending order.',
+ value: :count_failed_desc
+
+ value 'COUNT_CANCELED_ASC',
+ 'Sort by canceled count in ascending order.',
+ value: :count_canceled_asc
+
+ value 'COUNT_CANCELED_DESC',
+ 'Sort by canceled count in descending order.',
+ value: :count_canceled_desc
+
+ value 'COUNT_OTHER_ASC',
+ 'Sort by other (canceled or skipped) count in ascending order.',
+ value: :count_other_asc
+
+ value 'COUNT_OTHER_DESC',
+ 'Sort by other (canceled or skipped) count in descending order.',
+ value: :count_other_desc
+
+ value 'TOTAL_COUNT_ASC',
+ 'Sort by total count in ascending order.',
+ value: :total_count_asc
+
+ value 'TOTAL_COUNT_DESC',
+ 'Sort by total count in descending order.',
+ value: :total_count_desc
end
end
end
diff --git a/app/graphql/types/ci/job_analytics_type.rb b/app/graphql/types/ci/job_analytics_type.rb
index 7f022e9178e74629829427cb4d3a85cffe4f2f12..81727425392f7d53de76eb28e1f68a03df42f10a 100644
--- a/app/graphql/types/ci/job_analytics_type.rb
+++ b/app/graphql/types/ci/job_analytics_type.rb
@@ -14,31 +14,6 @@ class JobAnalyticsType < BaseObject # rubocop:disable Graphql/AuthorizeTypes --
null: true,
description: 'Stage information.'
- field :mean_duration_in_seconds, GraphQL::Types::Float,
- null: true,
- description: 'Average duration of jobs in seconds.'
-
- field :p95_duration_in_seconds, GraphQL::Types::Float,
- null: true,
- description: '95th percentile duration of jobs in seconds.'
-
- # rubocop:disable GraphQL/ExtractType -- this type is based on hash data, not an ActiveRecord model
- # So extracting to a separate type makes it difficult for both code and the UX
-
- field :rate_of_success, GraphQL::Types::Float,
- null: true,
- description: 'Percentage of successful jobs.'
-
- field :rate_of_failed, GraphQL::Types::Float,
- null: true,
- description: 'Percentage of failed jobs.'
-
- field :rate_of_canceled, GraphQL::Types::Float,
- null: true,
- description: 'Percentage of canceled jobs.'
-
- # rubocop:enable GraphQL/ExtractType
-
field :statistics, Types::Ci::JobAnalyticsStatisticsType,
null: true,
description: 'Statistics for the jobs.'
diff --git a/doc/api/graphql/reference/_index.md b/doc/api/graphql/reference/_index.md
index 1cc787520608c79f91d80cf7037b730c61803e4e..4c8c73c9d3863f83b5c5735a9cf5333e272034a5 100644
--- a/doc/api/graphql/reference/_index.md
+++ b/doc/api/graphql/reference/_index.md
@@ -27602,12 +27602,7 @@ CI/CD job analytics data.
| Name | Type | Description |
| ---- | ---- | ----------- |
-| `meanDurationInSeconds` | [`Float`](#float) | Average duration of jobs in seconds. |
| `name` | [`String`](#string) | Job name. |
-| `p95DurationInSeconds` | [`Float`](#float) | 95th percentile duration of jobs in seconds. |
-| `rateOfCanceled` | [`Float`](#float) | Percentage of canceled jobs. |
-| `rateOfFailed` | [`Float`](#float) | Percentage of failed jobs. |
-| `rateOfSuccess` | [`Float`](#float) | Percentage of successful jobs. |
| `stage` | [`CiStage`](#cistage) | Stage information. |
| `statistics` | [`CiJobAnalyticsStatistics`](#cijobanalyticsstatistics) | Statistics for the jobs. |
@@ -42916,11 +42911,9 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
-| `aggregations` | [`[CiJobAnalyticsAggregation!]!`](#cijobanalyticsaggregation) | Aggregation functions to apply. |
| `fromTime` | [`Time`](#time) | Start of the requested time (in UTC). Defaults to the pipelines started in the past week. |
| `nameSearch` | [`String`](#string) | Search by name of the pipeline jobs. Supports partial matches. |
| `ref` | [`String`](#string) | Branch that triggered the pipeline. |
-| `selectFields` | [`[CiJobAnalyticsField!]!`](#cijobanalyticsfield) | Fields to select and group by. |
| `sort` | [`CiJobAnalyticsSort`](#cijobanalyticssort) | Sort order for the results. |
| `source` | [`CiPipelineSources`](#cipipelinesources) | Source of the pipeline. |
| `toTime` | [`Time`](#time) | End of the requested time (in UTC). Defaults to the pipelines started before the current date. |
@@ -50747,27 +50740,6 @@ Available input types.
| `NUMBER` | Number input. |
| `STRING` | String input. |
-### `CiJobAnalyticsAggregation`
-
-Aggregation functions available for CI/CD job analytics.
-
-| Value | Description |
-| ----- | ----------- |
-| `MEAN_DURATION_IN_SECONDS` | Average duration of jobs in seconds. |
-| `P95_DURATION_IN_SECONDS` | 95th percentile duration of jobs in seconds. |
-| `RATE_OF_CANCELED` | Percentage of canceled jobs. |
-| `RATE_OF_FAILED` | Percentage of failed jobs. |
-| `RATE_OF_SUCCESS` | Percentage of successful jobs. |
-
-### `CiJobAnalyticsField`
-
-Fields available for selection in CI/CD job analytics.
-
-| Value | Description |
-| ----- | ----------- |
-| `NAME` | Job name. |
-| `STAGE` | Stage. |
-
### `CiJobAnalyticsSort`
Values for sorting CI/CD job analytics.
@@ -50776,16 +50748,36 @@ Values for sorting CI/CD job analytics.
| ----- | ----------- |
| `CANCELED_RATE_ASC` | Sort by canceled rate in ascending order. |
| `CANCELED_RATE_DESC` | Sort by canceled rate in descending order. |
+| `COUNT_CANCELED_ASC` | Sort by canceled count in ascending order. |
+| `COUNT_CANCELED_DESC` | Sort by canceled count in descending order. |
+| `COUNT_FAILED_ASC` | Sort by failed count in ascending order. |
+| `COUNT_FAILED_DESC` | Sort by failed count in descending order. |
+| `COUNT_OTHER_ASC` | Sort by other (canceled or skipped) count in ascending order. |
+| `COUNT_OTHER_DESC` | Sort by other (canceled or skipped) count in descending order. |
+| `COUNT_SUCCESS_ASC` | Sort by success count in ascending order. |
+| `COUNT_SUCCESS_DESC` | Sort by success count in descending order. |
| `FAILED_RATE_ASC` | Sort by failed rate in ascending order. |
| `FAILED_RATE_DESC` | Sort by failed rate in descending order. |
| `MEAN_DURATION_ASC` | Sort by mean duration in ascending order. |
| `MEAN_DURATION_DESC` | Sort by mean duration in descending order. |
| `NAME_ASC` | Sort by name in ascending order. |
| `NAME_DESC` | Sort by name in descending order. |
+| `OTHER_RATE_ASC` | Sort by other (canceled or skipped) rate in ascending order. |
+| `OTHER_RATE_DESC` | Sort by other (canceled or skipped) rate in descending order. |
+| `P50_DURATION_ASC` | Sort by 50th percentile duration in ascending order. |
+| `P50_DURATION_DESC` | Sort by 50th percentile duration in descending order. |
+| `P75_DURATION_ASC` | Sort by 75th percentile duration in ascending order. |
+| `P75_DURATION_DESC` | Sort by 75th percentile duration in descending order. |
+| `P90_DURATION_ASC` | Sort by 90th percentile duration in ascending order. |
+| `P90_DURATION_DESC` | Sort by 90th percentile duration in descending order. |
| `P95_DURATION_ASC` | Sort by 95th percentile duration in ascending order. |
| `P95_DURATION_DESC` | Sort by 95th percentile duration in descending order. |
+| `P99_DURATION_ASC` | Sort by 99th percentile duration in ascending order. |
+| `P99_DURATION_DESC` | Sort by 99th percentile duration in descending order. |
| `SUCCESS_RATE_ASC` | Sort by success rate in ascending order. |
| `SUCCESS_RATE_DESC` | Sort by success rate in descending order. |
+| `TOTAL_COUNT_ASC` | Sort by total count in ascending order. |
+| `TOTAL_COUNT_DESC` | Sort by total count in descending order. |
### `CiJobFailureReason`
diff --git a/lib/ci/job_analytics/query_builder.rb b/lib/ci/job_analytics/query_builder.rb
index 1638c05aa3a8691f82bc366d87d7c2e8b7b45dc1..882d35bd8b7630f6902d440b0681be065deefba1 100644
--- a/lib/ci/job_analytics/query_builder.rb
+++ b/lib/ci/job_analytics/query_builder.rb
@@ -62,6 +62,10 @@ def build_finder
def extract_sort_info(value)
value.match(/(?<field>.*)_(?<dir>.*)/) => {field:, dir:}
+ # Map duration fields to their actual column names (without _duration suffix)
+ # e.g., mean_duration -> mean, p50_duration -> p50, p75_duration -> p75, etc.
+      field = field.sub(/^(mean|p\d+)_duration$/, '\1')
+
[field.to_sym, dir.to_sym]
end
end
diff --git a/lib/click_house/finders/ci/finished_builds_finder.rb b/lib/click_house/finders/ci/finished_builds_finder.rb
index bcf64e3aafcc0b519501956025d2bb7c162ed07e..4e9ed70161b110e58aa64f5ef72af2b7c1538057 100644
--- a/lib/click_house/finders/ci/finished_builds_finder.rb
+++ b/lib/click_house/finders/ci/finished_builds_finder.rb
@@ -10,21 +10,24 @@ class FinishedBuildsFinder < ::ClickHouse::Models::BaseModel
ALLOWED_TO_GROUP = %i[name stage_id].freeze
ALLOWED_TO_SELECT = %i[name stage_id].freeze
ALLOWED_AGGREGATIONS = %i[
+ mean
mean_duration_in_seconds
- p50_duration
- p75_duration
- p90_duration
- p95_duration
- p99_duration
+ p50
+ p75
+ p90
+ p95
+ p99
p95_duration_in_seconds
rate_of_success
rate_of_failed
rate_of_canceled
rate_of_skipped
+ rate_of_other
count_success
count_failed
count_canceled
count_skipped
+ count_other
total_count
].freeze
ALLOWED_TO_ORDER = (ALLOWED_TO_SELECT + ALLOWED_AGGREGATIONS).freeze
@@ -91,6 +94,15 @@ def mean_duration_in_seconds
)
end
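+
+      # Same as mean_duration_in_seconds, but aliased to the short column name
+      # `mean` to match the durationStatistics field names.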
+ def mean
+ select(
+ round(
+ ms_to_s(query_builder.avg(:duration))
+ ).as('mean'),
+ group_by_fields: false
+ )
+ end
+
def p95_duration_in_seconds
select(
round(
@@ -135,8 +147,23 @@ def total_count
end
end
+      # 'other' status covers canceled and skipped builds combined
+ def rate_of_other
+ select(
+ build_rate_aggregate_other,
+ group_by_fields: false
+ )
+ end
+
+ def count_other
+ select(
+ build_count_aggregate_other.as('count_other'),
+ group_by_fields: false
+ )
+ end
+
ALLOWED_PERCENTILES.each do |percentile|
- define_method(:"p#{percentile}_duration") do
+ define_method(:"p#{percentile}") do
duration_of_percentile(percentile)
end
end
@@ -194,11 +221,28 @@ def build_count_aggregate(status)
)
end
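+
+      # countIf(status IN ('canceled', 'skipped')) / count() * 100, rounded
+      # and aliased as rate_of_other.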
+ def build_rate_aggregate_other
+ other_count = query_builder.count_if(
+ query_builder.table[:status].in(%w[canceled skipped])
+ )
+
+ percentage = query_builder.division(other_count, query_builder.count)
+ percentage_value = query_builder.multiply(percentage, 100)
+
+ round(percentage_value).as('rate_of_other')
+ end
+
+ def build_count_aggregate_other
+ query_builder.count_if(
+ query_builder.table[:status].in(%w[canceled skipped])
+ )
+ end
+
def duration_of_percentile(percentile)
select(
round(
ms_to_s(query_builder.quantile(percentile.to_f / 100.0, :duration))
- ).as("p#{percentile}_duration"),
+ ).as("p#{percentile}"),
group_by_fields: false
)
end
diff --git a/spec/graphql/resolvers/ci/job_analytics_resolver_spec.rb b/spec/graphql/resolvers/ci/job_analytics_resolver_spec.rb
index 967f862e232984b34cf83a69dd0309bdcdd75571..d4ae913487c390ee23955a6ad895f975255a93ac 100644
--- a/spec/graphql/resolvers/ci/job_analytics_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/job_analytics_resolver_spec.rb
@@ -4,236 +4,333 @@
RSpec.describe Resolvers::Ci::JobAnalyticsResolver, :click_house, feature_category: :fleet_visibility do
include GraphqlHelpers
- include_context 'with CI job analytics test data', with_pipelines: false
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be(:project, freeze: true) { create(:project) }
- describe '#resolve' do
- subject(:resolve_scope) do
- resolve(
- described_class,
- obj: project,
- ctx: { current_user: user },
- args: args,
- arg_style: :internal
- )
- end
+ let(:lookahead) { negative_lookahead }
+ let(:resolver_args) { {} }
+ let(:expected_options) { {} }
+
+ before do
+ stub_application_setting(use_clickhouse_for_analytics: true)
+ end
- let(:query_builder) do
- instance_double(::Ci::JobAnalytics::QueryBuilder,
- execute: ClickHouse::Finders::Ci::FinishedBuildsFinder.new)
+ specify { expect(described_class.extras).to include(:lookahead) }
+
+ shared_examples 'builds query with expected options' do
+ it 'passes expected options to QueryBuilder' do
+ expect(::Ci::JobAnalytics::QueryBuilder).to receive(:new).with(
+ project: project,
+ current_user: user,
+ options: hash_including(expected_options)
+ ).and_call_original
+
+ resolve_job_analytics(project, resolver_args)
end
+ end
- let_it_be_with_reload(:user) { create(:user) }
- let(:args) { {} }
+ def resolve_job_analytics(container, args = {})
+ resolve(
+ described_class,
+ obj: container,
+ args: args,
+ ctx: { current_user: user },
+ lookahead: lookahead,
+ arg_style: :internal
+ )
+ end
- before do
- stub_application_setting(use_clickhouse_for_analytics: true)
+ describe '#resolve' do
+ before_all do
+ project.add_maintainer(user)
end
- context 'when user does not have permission to read_ci_cd_analytics' do
- it { is_expected.to be_nil }
+ context 'when user does not have permission' do
+ let(:user) { create(:user) }
+
+ it 'returns nil' do
+ expect(resolve_job_analytics(project)).to be_nil
+ end
end
- context 'when user has permission to read_ci_cd_analytics' do
- before_all do
- project.add_maintainer(user)
+ context 'when user has permission' do
+ it 'returns ClickHouseAggregatedConnection' do
+ expect(resolve_job_analytics(project)).to be_a(::Gitlab::Graphql::Pagination::ClickHouseAggregatedConnection)
end
- context 'with default arguments' do
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:mean_duration_in_seconds, :rate_of_failed, :p95_duration_in_seconds]
- }
- end
+ context 'with negative lookahead' do
+ let(:expected_options) { { select_fields: [:name], aggregations: [] } }
- it 'returns ClickHouseAggregatedConnection' do
- expect(resolve_scope).to be_a(::Gitlab::Graphql::Pagination::ClickHouseAggregatedConnection)
+ include_examples 'builds query with expected options'
+ end
+
+ context 'with lookahead detecting fields' do
+ let(:lookahead) { build_lookahead }
+
+ context 'when selecting multiple fields' do
+ before do
+ stub_selections(:name, :stage_id)
+ end
+
+ let(:expected_options) { { select_fields: [:name, :stage_id] } }
+
+ include_examples 'builds query with expected options'
end
- it 'calls QueryBuilderService with correct parameters' do
- expect(::Ci::JobAnalytics::QueryBuilder).to receive(:new).with(project: project,
- current_user: user,
- options: args).and_return(query_builder)
+ context 'when selecting fields not in ALLOWED_TO_SELECT' do
+ before do
+ stub_selections(:invalid_field, :another_invalid)
+ end
+
+ let(:expected_options) { { select_fields: [:name] } }
- resolve_scope
+ include_examples 'builds query with expected options'
end
- end
- shared_examples 'resolves with filtering' do
- it "correctly configures the QueryBuilder" do
- expect(::Ci::JobAnalytics::QueryBuilder).to receive(:new).with(
- project: project,
- current_user: user,
- options: hash_including(expected_params)
- ).and_return(query_builder)
+ context 'when selecting duration aggregations' do
+ before do
+ stub_selections(:name, statistics: { duration_statistics: [:mean, :p50, :p90, :p99] })
+ end
+
+ let(:expected_options) { { aggregations: contain_exactly(:mean, :p50, :p90, :p99) } }
- is_expected.to be_a(::Gitlab::Graphql::Pagination::ClickHouseAggregatedConnection)
+ include_examples 'builds query with expected options'
end
- end
- context 'with name search filter' do
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
- name_search: 'compile'
- }
+ context 'when selecting count aggregations with specific statuses' do
+ before do
+ stub_selections(:name, statistics: {
+ count: [{ status: :success }, { status: :failed }]
+ })
+ end
+
+ let(:expected_options) { { aggregations: contain_exactly(:count_success, :count_failed) } }
+
+ include_examples 'builds query with expected options'
end
- let(:expected_params) { { name_search: 'compile' } }
+ context 'when selecting count without status argument' do
+ before do
+ stub_selections(:name, statistics: { count: [{}] })
+ end
- include_examples 'resolves with filtering'
- end
+ let(:expected_options) { { aggregations: contain_exactly(:total_count) } }
- context 'with source filter' do
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:rate_of_success],
- source: 'web'
- }
+ include_examples 'builds query with expected options'
end
- let(:expected_params) { { source: 'web' } }
+ context 'when selecting count with any status' do
+ before do
+ stub_selections(:name, statistics: { count: [{ status: :any }] })
+ end
- include_examples 'resolves with filtering'
- end
+ let(:expected_options) { { aggregations: contain_exactly(:total_count) } }
- context 'with ref filter' do
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
- ref: 'feature-branch'
- }
+ include_examples 'builds query with expected options'
end
- let(:expected_params) { { ref: 'feature-branch' } }
+ context 'when selecting rate aggregations' do
+ before do
+ stub_selections(:name, statistics: { rate: [{ status: :failed }] })
+ end
- include_examples 'resolves with filtering'
- end
+ let(:expected_options) { { aggregations: contain_exactly(:rate_of_failed) } }
- context 'with time range filters' do
- let(:from_time) { 24.hours.ago }
- let(:to_time) { Time.current }
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
- from_time: from_time,
- to_time: to_time
- }
+ include_examples 'builds query with expected options'
end
- let(:expected_params) { { from_time: from_time, to_time: to_time } }
+ context 'when selecting rate with any status' do
+ before do
+ stub_selections(:name, statistics: { rate: [{ status: :any }] })
+ end
- include_examples 'resolves with filtering'
- end
+ let(:expected_options) { { aggregations: [] } }
- context 'with sort parameter' do
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
- sort: :mean_duration_in_seconds_desc
- }
+ include_examples 'builds query with expected options'
end
- let(:expected_params) { { sort: :mean_duration_in_seconds_desc } }
+ context 'when selecting rate without status argument' do
+ before do
+ stub_selections(:name, statistics: { rate: [{}] })
+ end
- include_examples 'resolves with filtering'
- end
+ let(:expected_options) { { aggregations: [] } }
- context 'with multiple select fields' do
- let(:args) do
- {
- select_fields: [:name, :stage_id],
- aggregations: [:mean_duration_in_seconds, :rate_of_success, :rate_of_failed]
- }
+ include_examples 'builds query with expected options'
end
- let(:expected_params) { args }
+ context 'when selecting count and rate with other status' do
+ before do
+ stub_selections(:name, statistics: {
+ count: [{ status: :other }],
+ rate: [{ status: :other }]
+ })
+ end
- include_examples 'resolves with filtering'
- end
+ let(:expected_options) { { aggregations: [] } }
- context 'with all aggregation types' do
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [
- :mean_duration_in_seconds,
- :p95_duration_in_seconds,
- :rate_of_success,
- :rate_of_failed,
- :rate_of_canceled
- ]
- }
+ include_examples 'builds query with expected options'
end
- let(:expected_params) { args }
+ context 'when selecting combined aggregations' do
+ before do
+ stub_selections(:name, statistics: {
+ duration_statistics: [:mean, :p95],
+ count: [{ status: :success }],
+ rate: [{ status: :failed }]
+ })
+ end
+
+ let(:expected_options) { { aggregations: contain_exactly(:mean, :p95, :count_success, :rate_of_failed) } }
- include_examples 'resolves with filtering'
+ include_examples 'builds query with expected options'
+ end
end
- context 'with combined filters and sorting' do
- let(:args) do
+ context 'with filter arguments' do
+ let(:from_time) { 1.week.ago }
+ let(:to_time) { Time.current }
+ let(:resolver_args) do
{
- select_fields: [:name, :stage_id],
- aggregations: [:mean_duration_in_seconds, :rate_of_failed],
name_search: 'rspec',
- source: 'push',
ref: 'main',
- sort: :rate_of_failed_desc,
- from_time: 7.days.ago,
- to_time: Time.current
+ source: 'push',
+ from_time: from_time,
+ to_time: to_time
}
end
- let(:expected_params) { args }
+ let(:expected_options) { resolver_args }
- include_examples 'resolves with filtering'
+ include_examples 'builds query with expected options'
end
- context 'when QueryBuilder raises ArgumentError' do
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:mean_duration_in_seconds]
- }
+ context 'with sort argument' do
+ let(:lookahead) { build_lookahead }
+ let(:resolver_args) { { sort: :mean_desc } }
+ let(:expected_options) { { aggregations: [:mean], sort: :mean_desc } }
+
+ before do
+ stub_selections(:name)
end
+ include_examples 'builds query with expected options'
+ end
+
+ context 'with sort by name' do
+ let(:lookahead) { build_lookahead }
+ let(:resolver_args) { { sort: :name_asc } }
+ let(:expected_options) { { aggregations: [], sort: :name_asc } }
+
before do
- allow(::Ci::JobAnalytics::QueryBuilder).to receive(:new)
- .and_raise(ArgumentError, 'Invalid argument')
+ stub_selections(:name)
end
- it 'raises Gitlab::Graphql::Errors::ArgumentError' do
- expect_graphql_error_to_be_created(
- Gitlab::Graphql::Errors::ArgumentError,
- 'Invalid argument'
- ) do
- resolve_scope
- end
+ include_examples 'builds query with expected options'
+ end
+
+ context 'when using nodes instead of edges' do
+ let(:lookahead) { build_lookahead(use_nodes: true) }
+ let(:expected_options) { { select_fields: [:name, :stage_id] } }
+
+ before do
+ stub_selections(:name, :stage_id)
end
+
+ include_examples 'builds query with expected options'
end
- context 'when ClickHouse is not configured' do
+ context 'when statistics is not selected' do
+ let(:lookahead) { build_lookahead }
+ let(:expected_options) { { aggregations: [] } }
+
before do
- allow(::Gitlab::ClickHouse).to receive(:configured?).and_return(false)
+ stub_selections(:name)
end
- let(:args) do
- {
- select_fields: [:name],
- aggregations: [:mean_duration_in_seconds]
- }
+ include_examples 'builds query with expected options'
+ end
+ end
+
+ context 'when QueryBuilder raises ArgumentError' do
+ before do
+ allow(::Ci::JobAnalytics::QueryBuilder).to receive(:new).and_raise(ArgumentError, 'Invalid argument')
+ end
+
+ it 'raises Gitlab::Graphql::Errors::ArgumentError' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'Invalid argument') do
+ resolve_job_analytics(project)
end
+ end
+ end
- it 'returns resource note available error' do
- expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
- resolve_scope
- end
+ context 'when ClickHouse is not configured' do
+ before do
+ allow(::Gitlab::ClickHouse).to receive(:configured?).and_return(false)
+ end
+
+ it 'raises resource not available error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_job_analytics(project)
+ end
+ end
+ end
+ end
+
+ private
+
+ def build_lookahead(use_nodes: false)
+ instance_double(GraphQL::Execution::Lookahead).tap do |la|
+ allow(la).to receive(:selected?).and_return(true)
+
+ if use_nodes
+ allow(la).to receive(:selects?).with(:edges).and_return(false)
+ allow(la).to receive(:selects?).with(:nodes).and_return(true)
+ allow(la).to receive(:selection).with(:nodes).and_return(la)
+ else
+ allow(la).to receive(:selects?).with(:edges).and_return(true)
+ allow(la).to receive(:selection).with(:edges).and_return(la)
+ end
+ end
+ end
+
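+  # Attaches the stubbed node selections to the lookahead double, routing
+  # through edges { node } or nodes depending on the connection shape.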
+ def stub_selections(*fields)
+ node = build_selection_double(fields)
+ if lookahead.selects?(:edges)
+ allow(lookahead).to receive(:selection).with(:node).and_return(node)
+ else
+ allow(lookahead).to receive(:selection).with(:nodes).and_return(node)
+ end
+ end
+
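+  # Builds a lookahead double for a node: plain symbols become simple field
+  # selections, while hashes become nested selections stubbed with
+  # selects?/selection.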
+ def build_selection_double(fields)
+ fields = Array.wrap(fields)
+ selections = fields.flat_map { |field| build_field_double(field) }
+
+ instance_double(GraphQL::Execution::Lookahead, selected?: true, selections: selections).tap do |node|
+ allow(node).to receive(:selects?).and_return(false)
+
+ fields.each do |field|
+ next unless field.is_a?(Hash)
+
+ field.each do |key, children|
+ allow(node).to receive(:selects?).with(key).and_return(true)
+ allow(node).to receive(:selection).with(key).and_return(build_selection_double(children))
+ end
+ end
+ end
+ end
+
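+  # Symbols map to simple field doubles; hash entries map to doubles carrying
+  # the field name and its GraphQL arguments (for count/rate with status).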
+ def build_field_double(field)
+ case field
+ when Symbol
+ instance_double(GraphQL::Execution::Lookahead, name: field)
+ when Hash
+ field.flat_map do |name, children|
+ next [] unless children.is_a?(Array)
+
+ children.filter_map do |child|
+ instance_double(GraphQL::Execution::Lookahead, name: name, arguments: child) if child.is_a?(Hash)
end
end
end
diff --git a/spec/graphql/types/ci/job_analytics_type_spec.rb b/spec/graphql/types/ci/job_analytics_type_spec.rb
index 0fb7005f7a31a673337317c7fc671c519de5c88a..51e0953362d9ed5ce83f9fac72e3dac941bbd9d3 100644
--- a/spec/graphql/types/ci/job_analytics_type_spec.rb
+++ b/spec/graphql/types/ci/job_analytics_type_spec.rb
@@ -9,11 +9,6 @@
expected_fields = %i[
name
stage
- meanDurationInSeconds
- p95DurationInSeconds
- rateOfSuccess
- rateOfFailed
- rateOfCanceled
statistics
]
diff --git a/spec/lib/ci/job_analytics/query_builder_spec.rb b/spec/lib/ci/job_analytics/query_builder_spec.rb
index 52b998e874078baa8c39a3175b02ff7f5d7fc5d0..bd1446b66fba1d64379347ced21f818eef375b34 100644
--- a/spec/lib/ci/job_analytics/query_builder_spec.rb
+++ b/spec/lib/ci/job_analytics/query_builder_spec.rb
@@ -42,7 +42,7 @@
let(:options) do
{
select_fields: [:name, :stage_id],
- aggregations: [:mean_duration_in_seconds],
+ aggregations: [:mean],
sort: 'name_asc',
source: 'web',
ref: 'main',
@@ -55,7 +55,7 @@
it 'sets all values correctly', :aggregate_failures do
expect(instance.project).to eq(project)
expect(instance.select_fields).to contain_exactly(:name, :stage_id)
- expect(instance.aggregations).to contain_exactly(:mean_duration_in_seconds)
+ expect(instance.aggregations).to contain_exactly(:mean)
expect(instance.sort).to eq('name_asc')
expect(instance.source).to eq('web')
expect(instance.ref).to eq('main')
@@ -95,7 +95,7 @@
end
context 'with aggregations only' do
- let(:options) { { aggregations: [:mean_duration_in_seconds] } }
+ let(:options) { { aggregations: [:mean] } }
it { expect { query_result }.not_to raise_error }
end
@@ -104,13 +104,13 @@
let(:options) do
{
select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
- sort: 'mean_duration_in_seconds_asc'
+ aggregations: [:mean],
+ sort: 'mean_asc'
}
end
it 'builds query with sorting' do
- durations = query_result.pluck('mean_duration_in_seconds')
+ durations = query_result.pluck('mean')
expect(durations).to eq(durations.sort)
end
end
@@ -143,8 +143,8 @@
let(:options) do
{
select_fields: [:name],
- aggregations: [:mean_duration_in_seconds, :rate_of_success],
- sort: 'mean_duration_in_seconds_desc',
+ aggregations: [:mean, :rate_of_success],
+ sort: 'mean_desc',
source: 'push',
ref: 'master',
from_time: 1.day.ago,
@@ -155,7 +155,7 @@
it 'builds complex query successfully', :aggregate_failures do
expect(query_result).to be_a(Array)
- expect(query_result.first.keys).to include('name', 'mean_duration_in_seconds', 'rate_of_success')
+ expect(query_result.first.keys).to include('name', 'mean', 'rate_of_success')
expect(query_result.pluck('name').uniq).to contain_exactly('compile', 'compile-slow')
end
end
@@ -193,7 +193,7 @@
let(:options) do
{
select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
+ aggregations: [:mean],
sort: 'name_asc',
name_search: 'test',
source: 'web',
@@ -227,10 +227,10 @@
end
context 'with descending sort' do
- let(:sort_value) { 'mean_duration_in_seconds_desc' }
+ let(:sort_value) { 'mean_desc' }
it 'returns correct field and direction' do
- is_expected.to eq([:mean_duration_in_seconds, :desc])
+ is_expected.to eq([:mean, :desc])
end
end
@@ -247,7 +247,7 @@
let(:options) do
{
select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
+ aggregations: [:mean],
name_search: 'compile'
}
end
@@ -256,7 +256,7 @@
direct_finder_result = ClickHouse::Finders::Ci::FinishedBuildsFinder.new
.for_project(project.id)
.select(:name)
- .mean_duration_in_seconds
+ .mean
.filter_by_job_name('compile')
.execute
@@ -269,14 +269,14 @@
let(:options) do
{
select_fields: [:name],
- aggregations: [:mean_duration_in_seconds]
+ aggregations: [:mean]
}
end
it 'generates valid SQL' do
sql = query_builder.to_sql
expected_sql = <<~SQL.squish.lines(chomp: true).join(' ')
- SELECT `ci_finished_builds`.`name`, round((avg(`ci_finished_builds`.`duration`) / 1000.0), 2) AS mean_duration_in_seconds
+ SELECT `ci_finished_builds`.`name`, round((avg(`ci_finished_builds`.`duration`) / 1000.0), 2) AS mean
FROM `ci_finished_builds` WHERE `ci_finished_builds`.`project_id` = #{project.id} AND `ci_finished_builds`.`pipeline_id` IN
(SELECT `ci_finished_pipelines`.`id` FROM `ci_finished_pipelines` WHERE `ci_finished_pipelines`.`path` = '#{project.project_namespace.traversal_path}'
AND `ci_finished_pipelines`.`started_at` >= toDateTime64('#{7.days.ago.utc.strftime('%Y-%m-%d %H:%M:%S')}', 6, 'UTC'))
@@ -322,17 +322,17 @@
let(:options) { { aggregations: aggregations, select_fields: [:name] } }
context 'with mean duration aggregation' do
- let(:aggregations) { [:mean_duration_in_seconds] }
+ let(:aggregations) { [:mean] }
it 'calculates mean duration correctly' do
is_expected.to include(
- a_hash_including('name' => 'compile', 'mean_duration_in_seconds' => 1.0),
- a_hash_including('name' => 'compile-slow', 'mean_duration_in_seconds' => 5.0)
+ a_hash_including('name' => 'compile', 'mean' => 1.0),
+ a_hash_including('name' => 'compile-slow', 'mean' => 5.0)
)
end
it 'rounds results to 2 decimal places' do
- expect(query_result.map { |r| r['mean_duration_in_seconds'].to_s.split('.').last.size }).to all(be <= 2)
+ expect(query_result.map { |r| r['mean'].to_s.split('.').last.size }).to all(be <= 2)
end
end
@@ -348,10 +348,10 @@
end
context 'with multiple aggregations' do
- let(:aggregations) { [:mean_duration_in_seconds, :rate_of_success] }
+ let(:aggregations) { [:mean, :rate_of_success] }
it 'applies multiple aggregations' do
- expect(query_result.first.keys).to contain_exactly('name', 'mean_duration_in_seconds', 'rate_of_success')
+ expect(query_result.first.keys).to contain_exactly('name', 'mean', 'rate_of_success')
end
end
end
@@ -360,25 +360,25 @@
let(:options) do
{
select_fields: [:name],
- aggregations: [:mean_duration_in_seconds],
+ aggregations: [:mean],
sort: sort
}
end
context 'with ascending sort' do
- let(:sort) { 'mean_duration_in_seconds_asc' }
+ let(:sort) { 'mean_asc' }
it 'sorts results in ascending order' do
- durations = query_result.pluck('mean_duration_in_seconds')
+ durations = query_result.pluck('mean')
expect(durations).to eq(durations.sort)
end
end
context 'with descending sort' do
- let(:sort) { 'mean_duration_in_seconds_desc' }
+ let(:sort) { 'mean_desc' }
it 'sorts results in descending order' do
- durations = query_result.pluck('mean_duration_in_seconds')
+ durations = query_result.pluck('mean')
expect(durations).to eq(durations.sort.reverse)
end
end
@@ -489,8 +489,8 @@
let(:options) do
{
select_fields: [:name, :stage_id],
- aggregations: [:mean_duration_in_seconds, :rate_of_success],
- sort: 'mean_duration_in_seconds_desc',
+ aggregations: [:mean, :rate_of_success],
+ sort: 'mean_desc',
name_search: 'compile',
from_time: 1.day.ago,
to_time: Time.current
@@ -500,12 +500,12 @@
it 'combines all features correctly', :aggregate_failures do
expect(query_result).not_to be_empty
expect(query_result.first.keys).to contain_exactly(
- 'name', 'stage_id', 'mean_duration_in_seconds', 'rate_of_success'
+ 'name', 'stage_id', 'mean', 'rate_of_success'
)
expect(query_result.pluck('name').uniq).to contain_exactly('compile', 'compile-slow')
# Assert sorting
- durations = query_result.pluck('mean_duration_in_seconds')
+ durations = query_result.pluck('mean')
expect(durations).to eq(durations.sort.reverse)
end
end
diff --git a/spec/lib/click_house/finders/ci/finished_builds_finder_spec.rb b/spec/lib/click_house/finders/ci/finished_builds_finder_spec.rb
index 9dc6a65ed333323744a6b11b1a241c2e00526a6c..04d1629bcf671c5dc331bb3f9ddeff57022d32a7 100644
--- a/spec/lib/click_house/finders/ci/finished_builds_finder_spec.rb
+++ b/spec/lib/click_house/finders/ci/finished_builds_finder_spec.rb
@@ -121,8 +121,8 @@
expect do
result
end.to raise_error(ArgumentError,
-                "Cannot aggregate columns: [:invalid_aggregation]. Allowed: mean_duration_in_seconds, " \
-                "p50_duration, p75_duration, p90_duration, p95_duration, p99_duration, p95_duration_in_seconds, " \
-                "rate_of_success, rate_of_failed, rate_of_canceled, rate_of_skipped, " \
-                "count_success, count_failed, count_canceled, count_skipped, total_count"
+                "Cannot aggregate columns: [:invalid_aggregation]. Allowed: mean, mean_duration_in_seconds, " \
+                "p50, p75, p90, p95, p99, p95_duration_in_seconds, " \
+                "rate_of_success, rate_of_failed, rate_of_canceled, rate_of_skipped, rate_of_other, " \
+                "count_success, count_failed, count_canceled, count_skipped, count_other, total_count"
)
@@ -130,24 +130,24 @@
end
end
- describe '#mean_duration_in_seconds' do
+ describe '#mean' do
subject(:result) do
instance.for_project(project.id)
.select(:name)
- .mean_duration_in_seconds
+ .mean
.execute
end
it 'calculates average duration correctly' do
is_expected.to include(
- a_hash_including('name' => 'compile', 'mean_duration_in_seconds' => 1.0),
- a_hash_including('name' => 'compile-slow', 'mean_duration_in_seconds' => 5.0),
- a_hash_including('name' => 'rspec', 'mean_duration_in_seconds' => be_within(0.01).of(2.67))
+ a_hash_including('name' => 'compile', 'mean' => 1.0),
+ a_hash_including('name' => 'compile-slow', 'mean' => 5.0),
+ a_hash_including('name' => 'rspec', 'mean' => be_within(0.01).of(2.67))
)
end
it 'rounds the result to 2 decimal places' do
- is_expected.to be_rounded_to_decimal_places('mean_duration_in_seconds', decimal_places: 2)
+ is_expected.to be_rounded_to_decimal_places('mean', decimal_places: 2)
end
end
@@ -190,24 +190,25 @@
describe 'percentile duration methods' do
include_context 'with percentile test data'
- describe '#p50_duration' do
- it_behaves_like 'percentile duration calculation', '50th', 'p50_duration', expected_value: 1.0
+ describe '#p50' do
+ it_behaves_like 'percentile duration calculation', '50th', 'p50', expected_value: 1.0
end
- describe '#p75_duration' do
- it_behaves_like 'percentile duration calculation', '75th', 'p75_duration', expected_value: 1.5
+ describe '#p75' do
+ it_behaves_like 'percentile duration calculation', '75th', 'p75', expected_value: 1.5
end
- describe '#p90_duration' do
- it_behaves_like 'percentile duration calculation', '90th', 'p90_duration', expected_value: 1.8
+ describe '#p90' do
+ it_behaves_like 'percentile duration calculation', '90th', 'p90', expected_value: 1.8
end
describe '#p95_duration_in_seconds' do
- it_behaves_like 'percentile duration calculation', '95th', 'p95_duration_in_seconds', expected_value: 1.9
+ it_behaves_like 'percentile duration calculation', '95th', 'p95_duration_in_seconds',
+ expected_value: 1.9
end
- describe '#p99_duration' do
- it_behaves_like 'percentile duration calculation', '99th', 'p99_duration', expected_value: 1.98
+ describe '#p99' do
+ it_behaves_like 'percentile duration calculation', '99th', 'p99', expected_value: 1.98
end
end
@@ -341,11 +342,11 @@
let(:percentile_result) { result.find { |r| r['name'] == 'percentile-test' } }
it 'calculates 50th percentile duration correctly' do
- expect(percentile_result.fetch('p50_duration')).to be_within(0.1).of(1.0)
+ expect(percentile_result.fetch('p50')).to be_within(0.1).of(1.0)
end
it 'rounds the result to 2 decimal places' do
- is_expected.to be_rounded_to_decimal_places('p50_duration', decimal_places: 2)
+ is_expected.to be_rounded_to_decimal_places('p50', decimal_places: 2)
end
context 'with percentile - 95' do
@@ -353,7 +354,7 @@
it 'calculates 95th percentile duration correctly' do
# p95 of 100ms, 200ms, ..., 2000ms should be 1900ms = 1.9 seconds
- expect(percentile_result.fetch('p95_duration')).to be_within(0.1).of(1.9)
+ expect(percentile_result.fetch('p95')).to be_within(0.1).of(1.9)
end
end
end
diff --git a/spec/lib/gitlab/graphql/pagination/click_house_aggregated_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/click_house_aggregated_connection_spec.rb
index 7ed4687646365432c715044613f01edb6a627a65..152a00bd67946337502edd95fbf75ffa3e7d2523 100644
--- a/spec/lib/gitlab/graphql/pagination/click_house_aggregated_connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/click_house_aggregated_connection_spec.rb
@@ -12,7 +12,7 @@
let(:arguments) { {} }
let(:table) { Arel::Table.new(:ci_finished_builds) }
- let(:mean_duration_in_seconds) do
+ let(:mean_duration) do
duration_function = Arel::Nodes::NamedFunction.new(
'avg',
[table[:duration]]
@@ -26,7 +26,7 @@
Arel::Nodes.build_quoted(1000.0)),
2
]
- ).as('mean_duration_in_seconds')
+ ).as('mean')
end
# query builder that groups by name and calculates mean duration
@@ -34,10 +34,10 @@
ClickHouse::Client::QueryBuilder
.new('ci_finished_builds')
.select(:name)
- .select(mean_duration_in_seconds)
+ .select(mean_duration)
.where(project_id: project.id)
.group(:name)
- .order(Arel.sql('mean_duration_in_seconds'), :desc)
+ .order(Arel.sql('mean'), :desc)
end
# Wrap the query builder in the aggregated relation
@@ -52,9 +52,9 @@
describe '#nodes' do
let(:expected_results) do
[
- { 'name' => 'compile-slow', 'mean_duration_in_seconds' => 5.0 },
- { 'name' => 'rspec', 'mean_duration_in_seconds' => 2.67 },
- { 'name' => 'compile', 'mean_duration_in_seconds' => 1.0 }
+ { 'name' => 'compile-slow', 'mean' => 5.0 },
+ { 'name' => 'rspec', 'mean' => 2.67 },
+ { 'name' => 'compile', 'mean' => 1.0 }
]
end
@@ -92,7 +92,7 @@
end
context 'when after cursor is provided', :aggregate_failures do
- let(:cursor_node) { { 'name' => 'compile-slow', 'mean_duration_in_seconds' => 5.0 } }
+ let(:cursor_node) { { 'name' => 'compile-slow', 'mean' => 5.0 } }
let(:arguments) { { after: encoded_cursor(cursor_node) } }
it 'returns results after the cursor' do
@@ -104,7 +104,7 @@
end
context 'when before cursor is provided', :aggregate_failures do
- let(:cursor_node) { { 'name' => 'compile', 'mean_duration_in_seconds' => 1.0 } }
+ let(:cursor_node) { { 'name' => 'compile', 'mean' => 1.0 } }
let(:arguments) { { before: encoded_cursor(cursor_node) } }
it 'returns results before the cursor' do
@@ -116,8 +116,8 @@
end
context 'when both after and before cursors are provided', :aggregate_failures do
- let(:after_node) { { 'name' => 'compile-slow', 'mean_duration_in_seconds' => 5.0 } }
- let(:before_node) { { 'name' => 'compile', 'mean_duration_in_seconds' => 1.0 } }
+ let(:after_node) { { 'name' => 'compile-slow', 'mean' => 5.0 } }
+ let(:before_node) { { 'name' => 'compile', 'mean' => 1.0 } }
let(:arguments) do
{
after: encoded_cursor(after_node),
@@ -135,7 +135,7 @@
end
context 'when before and last are provided', :aggregate_failures do
- let(:before_node) { { 'name' => 'compile', 'mean_duration_in_seconds' => 1.0 } }
+ let(:before_node) { { 'name' => 'compile', 'mean' => 1.0 } }
let(:arguments) { { last: 1, before: encoded_cursor(before_node) } }
it 'returns the last N results before the cursor' do
@@ -148,7 +148,7 @@
end
context 'when before and first are provided', :aggregate_failures do
- let(:before_node) { { 'name' => 'compile', 'mean_duration_in_seconds' => 1.0 } }
+ let(:before_node) { { 'name' => 'compile', 'mean' => 1.0 } }
let(:arguments) { { first: 1, before: encoded_cursor(before_node) } }
it 'returns the first N results before the cursor' do
@@ -181,7 +181,7 @@
subject(:has_previous_page) { connection.has_previous_page }
context 'when after cursor is provided' do
- let(:cursor_node) { { 'name' => 'compile-slow', 'mean_duration_in_seconds' => 5.0 } }
+ let(:cursor_node) { { 'name' => 'compile-slow', 'mean' => 5.0 } }
let(:arguments) { { after: encoded_cursor(cursor_node) } }
it { is_expected.to be_truthy }
@@ -204,7 +204,7 @@
subject(:has_next_page) { connection.has_next_page }
context 'when before cursor is provided' do
- let(:cursor_node) { { 'name' => 'compile', 'mean_duration_in_seconds' => 1.0 } }
+ let(:cursor_node) { { 'name' => 'compile', 'mean' => 1.0 } }
let(:arguments) { { before: encoded_cursor(cursor_node) } }
it { is_expected.to be_truthy }
@@ -224,7 +224,7 @@
end
describe '#cursor_for' do
- let(:node) { { 'name' => 'compile', 'mean_duration_in_seconds' => 1.0 } }
+ let(:node) { { 'name' => 'compile', 'mean' => 1.0 } }
subject(:cursor_for) do
decoded_cursor(connection.cursor_for(node))
@@ -232,14 +232,14 @@
it 'generates a valid cursor' do
is_expected.to eq(
- 'sort_field' => 'mean_duration_in_seconds',
+ 'sort_field' => 'mean',
'sort_value' => 1.0,
'group_by_values' => { 'name' => 'compile' }
)
end
it 'excludes sort field from group_by_values' do
- expect(cursor_for['group_by_values']).not_to have_key('mean_duration_in_seconds')
+ expect(cursor_for['group_by_values']).not_to have_key('mean')
end
end
@@ -255,10 +255,10 @@
ClickHouse::Client::QueryBuilder
.new('ci_finished_builds')
.select(table[:name], table[:stage_id])
- .select(mean_duration_in_seconds)
+ .select(mean_duration)
.where(project_id: project.id)
.group(table[:name], table[:stage_id])
- .order(Arel.sql('mean_duration_in_seconds'), :desc)
+ .order(Arel.sql('mean'), :desc)
end
it { is_expected.to contain_exactly('name', 'stage_id') }
@@ -273,15 +273,15 @@
ClickHouse::Client::QueryBuilder
.new('ci_finished_builds')
.select(:name, :stage_id)
- .select(mean_duration_in_seconds)
+ .select(mean_duration)
.where(project_id: project.id)
.group(:name, :stage_id)
- .order(Arel.sql('mean_duration_in_seconds'), :desc)
+ .order(Arel.sql('mean'), :desc)
end
it 'handles multiple group by fields correctly' do
expect(nodes).not_to be_empty
- expect(nodes.first.keys).to contain_exactly('name', 'stage_id', 'mean_duration_in_seconds')
+ expect(nodes.first.keys).to contain_exactly('name', 'stage_id', 'mean')
end
it 'generates cursors with all group by values' do
@@ -297,14 +297,14 @@
ClickHouse::Client::QueryBuilder
.new('ci_finished_builds')
.select(:name)
- .select(mean_duration_in_seconds)
+ .select(mean_duration)
.where(project_id: project.id)
.group(:name)
- .order(Arel.sql('mean_duration_in_seconds'), :asc)
+ .order(Arel.sql('mean'), :asc)
end
it 'handles ascending order correctly' do
- durations = nodes.pluck('mean_duration_in_seconds')
+ durations = nodes.pluck('mean')
expect(durations).to eq(durations.sort)
end
@@ -319,7 +319,7 @@
end
it 'paginate second page with ascending order' do
- expect(second_page_nodes.first['mean_duration_in_seconds']).to be > cursor_node['mean_duration_in_seconds']
+ expect(second_page_nodes.first['mean']).to be > cursor_node['mean']
end
end
end
@@ -329,7 +329,7 @@
ClickHouse::Client::QueryBuilder
.new('ci_finished_builds')
.select(:name)
- .select(mean_duration_in_seconds)
+ .select(mean_duration)
.where(project_id: project.id)
.group(:name)
.order(:name, :asc)
@@ -358,10 +358,10 @@
ClickHouse::Client::QueryBuilder
.new('ci_finished_builds')
.select(:name)
- .select(mean_duration_in_seconds)
+ .select(mean_duration)
.where(project_id: non_existing_record_id)
.group(:name)
- .order(Arel.sql('mean_duration_in_seconds'), :desc)
+ .order(Arel.sql('mean'), :desc)
end
it 'handles empty results gracefully', :aggregate_failures do
@@ -401,7 +401,7 @@
ClickHouse::Client::QueryBuilder
.new('ci_finished_builds')
.select(:name)
- .select(mean_duration_in_seconds)
+ .select(mean_duration)
.where(project_id: project.id)
.group(:name)
end
diff --git a/spec/requests/api/graphql/project/job_analytics_spec.rb b/spec/requests/api/graphql/project/job_analytics_spec.rb
index d078451ac7f72f48bc9175458ae14e58bf57d758..674b0b99460f201bc3faf7f713edc038d00834da 100644
--- a/spec/requests/api/graphql/project/job_analytics_spec.rb
+++ b/spec/requests/api/graphql/project/job_analytics_spec.rb
@@ -10,22 +10,71 @@
let_it_be_with_reload(:user) { create(:user) }
let(:current_user) { user }
- let(:job_analytics_args) do
+ let(:job_analytics_args) { {} }
+
+ let(:basic_fields) do
+ {
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ p95: nil
+ },
+ success_rate: [:rate, { status: :SUCCESS }],
+ failed_rate: [:rate, { status: :FAILED }],
+ other_rate: [:rate, { status: :OTHER }]
+ }
+ }
+ }
+ end
+
+ let(:simple_name_fields) do
{
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS, :RATE_OF_SUCCESS, :RATE_OF_CANCELED, :RATE_OF_FAILED,
- :P95_DURATION_IN_SECONDS]
+ nodes: {
+ name: nil
+ }
+ }
+ end
+
+ let(:duration_stats_fields) do
+ {
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ mean: nil
+ }
+ }
+ }
}
end
+ let(:pagination_fields) do
+ {
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ mean: nil
+ }
+ }
+ },
+ page_info: {
+ has_next_page: nil,
+ has_previous_page: nil,
+ start_cursor: nil,
+ end_cursor: nil
+ }
+ }
+ end
+
+ let(:job_analytics_fields) { hash_to_graphql_fields(basic_fields) }
+
let(:query) do
graphql_query_for(
:project,
- { full_path: project.full_path },
- query_nodes(:job_analytics, all_graphql_fields_for('CiJobAnalytics'),
- args: job_analytics_args,
- include_pagination_info: true
- )
+ { fullPath: project.full_path },
+ query_graphql_field(:jobAnalytics, job_analytics_args, job_analytics_fields)
)
end
@@ -67,24 +116,18 @@ def expect_valid_job_analytics_response
expect_valid_job_analytics_response
end
- it 'returns aggregated metrics' do
+ it 'returns aggregated metrics in nested structures' do
post_graphql(query, current_user: current_user)
expect_valid_job_analytics_response
- %w[name meanDurationInSeconds p95DurationInSeconds rateOfFailed].each do |key|
- expect(nodes).to all(have_key(key))
- end
+ expect(nodes).to all(have_key('name'))
+ expect(nodes).to all(have_key('statistics'))
end
end
context 'with name search filter' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- name_search: 'compile'
- }
- end
+ let(:job_analytics_args) { { nameSearch: 'compile' } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(duration_stats_fields) }
it 'filters jobs by name' do
post_graphql(query, current_user: current_user)
@@ -94,36 +137,9 @@ def expect_valid_job_analytics_response
end
end
- context 'with stage selection' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME, :STAGE],
- aggregations: [:MEAN_DURATION_IN_SECONDS, :RATE_OF_SUCCESS],
- name_search: 'compile'
- }
- end
-
- it 'includes stage information' do
- post_graphql(query, current_user: current_user)
-
- expect_valid_job_analytics_response
- expect(nodes).to all(have_key('stage'))
- nodes.each do |node|
- expect(node['stage']).to have_key('id')
- expect(node['stage']).to have_key('name')
- end
- end
- end
-
context 'with time range filters' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- from_time: 13.hours.ago.iso8601,
- to_time: Time.current.iso8601
- }
- end
+ let(:job_analytics_args) { { fromTime: 13.hours.ago.iso8601, toTime: Time.current.iso8601 } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(duration_stats_fields) }
it 'filters by time range' do
post_graphql(query, current_user: current_user)
@@ -135,12 +151,16 @@ def expect_valid_job_analytics_response
end
context 'with source filter' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:RATE_OF_SUCCESS],
- source: :WEB
- }
+ let(:job_analytics_args) { { source: :WEB } }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ success_rate: [:rate, { status: :SUCCESS }]
+ }
+ }
+ })
end
it 'filters by pipeline source' do
@@ -151,13 +171,8 @@ def expect_valid_job_analytics_response
end
context 'with invalid source value' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:RATE_OF_SUCCESS],
- source: :INVALID_SOURCE
- }
- end
+ let(:job_analytics_args) { { source: :INVALID_SOURCE } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(simple_name_fields) }
it 'returns a GraphQL error' do
post_graphql(query, current_user: current_user)
@@ -169,12 +184,16 @@ def expect_valid_job_analytics_response
end
context 'with ref filter' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:RATE_OF_SUCCESS],
- ref: 'feature-branch'
- }
+ let(:job_analytics_args) { { ref: 'feature-branch' } }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ success_rate: [:rate, { status: :SUCCESS }]
+ }
+ }
+ })
end
it 'filters by ref' do
@@ -185,13 +204,8 @@ def expect_valid_job_analytics_response
end
context 'with non existing ref value' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:RATE_OF_SUCCESS],
- ref: non_existing_project_hashed_path
- }
- end
+ let(:job_analytics_args) { { ref: non_existing_project_hashed_path } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(simple_name_fields) }
it 'filters by ref' do
post_graphql(query, current_user: current_user)
@@ -204,49 +218,57 @@ def expect_valid_job_analytics_response
context 'with sorting' do
context 'when sorted by mean duration ascending' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- sort: :MEAN_DURATION_ASC
- }
- end
+ let(:job_analytics_args) { { sort: :MEAN_DURATION_ASC } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(duration_stats_fields) }
it 'sorts results by mean duration' do
post_graphql(query, current_user: current_user)
expect_valid_job_analytics_response
- durations = nodes.pluck('meanDurationInSeconds')
- expect(durations).to eq(durations.sort)
+ durations = nodes.map { |n| n.dig('statistics', 'durationStatistics', 'mean') }
+ expect(durations).to eq(durations.compact.sort)
end
end
context 'when sorted by failure rate descending' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:RATE_OF_FAILED],
- sort: :FAILED_RATE_DESC
- }
+ let(:job_analytics_args) { { sort: :FAILED_RATE_DESC } }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ failed_rate: [:rate, { status: :FAILED }]
+ }
+ }
+ })
end
it 'sorts results by failure rate' do
post_graphql(query, current_user: current_user)
expect_valid_job_analytics_response
- rates = nodes.pluck('rateOfFailed')
- expect(rates).to eq(rates.sort.reverse)
+ rates = nodes.map { |n| n.dig('statistics', 'failedRate') }
+ expect(rates).to eq(rates.compact.sort.reverse)
end
end
end
context 'with all aggregations' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS, :P95_DURATION_IN_SECONDS, :RATE_OF_SUCCESS, :RATE_OF_FAILED,
- :RATE_OF_CANCELED]
- }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ mean: nil,
+ p95: nil
+ },
+ success_rate: [:rate, { status: :SUCCESS }],
+ failed_rate: [:rate, { status: :FAILED }],
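+ # OTHER status is not aggregated by the resolver yet; the field is still selectable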
+ other_rate: [:rate, { status: :OTHER }]
+ }
+ }
+ })
end
it 'returns all aggregation metrics' do
@@ -254,20 +276,20 @@ def expect_valid_job_analytics_response
expect_valid_job_analytics_response
- %w[meanDurationInSeconds p95DurationInSeconds rateOfSuccess rateOfFailed rateOfCanceled].each do |key|
- expect(nodes).to all(have_key(key))
+ nodes.each do |node|
+ statistics = node['statistics']
+ expect(statistics['durationStatistics']).to have_key('mean')
+ expect(statistics['durationStatistics']).to have_key('p95')
+ expect(statistics).to have_key('successRate')
+ expect(statistics).to have_key('failedRate')
+ expect(statistics).to have_key('otherRate')
end
end
end
context 'with pagination' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- first: 2
- }
- end
+ let(:job_analytics_args) { { first: 2 } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(pagination_fields) }
it 'supports pagination' do
post_graphql(query, current_user: current_user)
@@ -278,18 +300,30 @@ def expect_valid_job_analytics_response
end
context 'with cursor-based pagination' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- first: 1,
- sort: :NAME_ASC
- }
- end
-
- let(:first_page_query) { query }
-
it 'supports cursor-based pagination' do
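+ # first page: request one node plus pageInfo to obtain the cursor for the next request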
+ first_page_query = graphql_query_for(
+ :project,
+ { fullPath: project.full_path },
+ query_graphql_field(
+ :jobAnalytics,
+ { first: 1, sort: :NAME_ASC },
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ mean: nil
+ }
+ }
+ },
+ page_info: {
+ has_next_page: nil,
+ end_cursor: nil
+ }
+ })
+ )
+ )
+
post_graphql(first_page_query, current_user: current_user)
expect(nodes.pluck('name')).to eq(%w[compile])
@@ -299,11 +333,9 @@ def expect_valid_job_analytics_response
next_page_query = graphql_query_for(
:project,
- { full_path: project.full_path },
- query_nodes(:job_analytics, all_graphql_fields_for('CiJobAnalytics'),
- args: job_analytics_args.merge(after: cursor),
- include_pagination_info: true
- )
+ { fullPath: project.full_path },
+ query_graphql_field(:jobAnalytics, { first: 1, sort: :NAME_ASC, after: cursor },
+ hash_to_graphql_fields(simple_name_fields))
)
post_graphql(next_page_query, current_user: current_user)
@@ -313,14 +345,8 @@ def expect_valid_job_analytics_response
end
context 'for backward pagination' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- last: 1,
- sort: :NAME_ASC
- }
- end
+ let(:job_analytics_args) { { last: 1, sort: :NAME_ASC } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(simple_name_fields) }
it 'returns last page of results' do
post_graphql(query, current_user: current_user)
@@ -330,23 +356,41 @@ def expect_valid_job_analytics_response
end
context 'with cursor-based backward pagination' do
- let(:last_page_cursor) do
- post_graphql(query, current_user: current_user)
- job_analytics_response['pageInfo']['endCursor']
- end
+ it 'paginates backward' do
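+ # fetch the last page first to learn its endCursor, then page backward with 'before'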
+ last_page_query = graphql_query_for(
+ :project,
+ { fullPath: project.full_path },
+ query_graphql_field(
+ :jobAnalytics,
+ { last: 1, sort: :NAME_ASC },
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil
+ },
+ page_info: {
+ end_cursor: nil
+ }
+ })
+ )
+ )
- let(:second_last_page_query) do
- graphql_query_for(
+ post_graphql(last_page_query, current_user: current_user)
+ last_page_cursor = job_analytics_response['pageInfo']['endCursor']
+
+ second_last_page_query = graphql_query_for(
:project,
- { full_path: project.full_path },
- query_nodes(:job_analytics, all_graphql_fields_for('CiJobAnalytics'),
- args: job_analytics_args.merge(before: last_page_cursor),
- include_pagination_info: true
+ { fullPath: project.full_path },
+ query_graphql_field(
+ :jobAnalytics,
+ { last: 1, sort: :NAME_ASC, before: last_page_cursor },
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil
+ }
+ })
)
)
- end
- it 'paginates backward' do
post_graphql(second_last_page_query, current_user: current_user)
expect_valid_job_analytics_response
@@ -356,14 +400,8 @@ def expect_valid_job_analytics_response
end
context 'with huge first limit value' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- first: 1000,
- sort: :NAME_ASC
- }
- end
+ let(:job_analytics_args) { { first: 1000, sort: :NAME_ASC } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(simple_name_fields) }
before do
allow_next_instance_of(Gitlab::Graphql::Pagination::ClickHouseAggregatedConnection) do |instance|
@@ -383,17 +421,32 @@ def expect_valid_job_analytics_response
context 'with complex filters' do
let(:job_analytics_args) do
{
- select_fields: [:NAME, :STAGE],
- aggregations: [:MEAN_DURATION_IN_SECONDS, :RATE_OF_FAILED],
- name_search: 'source',
+ nameSearch: 'source',
source: :WEB,
ref: 'master',
- from_time: 7.days.ago.iso8601,
- to_time: Time.current.iso8601,
+ fromTime: 7.days.ago.iso8601,
+ toTime: Time.current.iso8601,
sort: :FAILED_RATE_ASC
}
end
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ stage: {
+ name: nil
+ },
+ statistics: {
+ duration_statistics: {
+ mean: nil
+ },
+ failed_rate: [:rate, { status: :FAILED }]
+ }
+ }
+ })
+ end
+
it 'applies multiple filters correctly' do
post_graphql(query, current_user: current_user)
@@ -416,13 +469,8 @@ def expect_valid_job_analytics_response
end
context 'with invalid sort value' do
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS],
- sort: :INVALID_SORT
- }
- end
+ let(:job_analytics_args) { { sort: 'INVALID_SORT' } }
+ let(:job_analytics_fields) { hash_to_graphql_fields(simple_name_fields) }
it 'returns a GraphQL error' do
post_graphql(query, current_user: current_user)
@@ -431,17 +479,360 @@ def expect_valid_job_analytics_response
expect_graphql_errors_to_include("Argument 'sort' on Field 'jobAnalytics' has an invalid value")
end
end
+
+ context 'with stage selection' do
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ stage: {
+ id: nil,
+ name: nil
+ },
+ statistics: {
+ total_count: [:count]
+ }
+ }
+ })
+ end
+
+ it 'returns stage information' do
+ post_graphql(query, current_user: current_user)
+
+ expect_valid_job_analytics_response
+ expect(nodes).to all(have_key('stage'))
+ end
+ end
+
+ context 'when only name is requested' do
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil
+ }
+ })
+ end
+
+ it 'works with minimal field selection' do
+ post_graphql(query, current_user: current_user)
+
+ expect_valid_job_analytics_response
+ expect(nodes.first.keys).to eq(['name'])
+ end
+ end
+
+ describe 'count fields with status parameter' do
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ success_count: [:count, { status: :SUCCESS }],
+ failed_count: [:count, { status: :FAILED }],
+ total_count: [:count]
+ }
+ }
+ })
+ end
+
+ it 'returns count fields for each job' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ expect(nodes).to be_an(Array)
+ expect(nodes).to all(have_key('statistics'))
+
+ nodes.each do |node|
+ statistics = node['statistics']
+ expect(statistics).to have_key('successCount')
+ expect(statistics).to have_key('failedCount')
+ expect(statistics).to have_key('totalCount')
+ end
+ end
+
+ it 'returns correct count values' do
+ post_graphql(query, current_user: current_user)
+
+ compile_node = nodes.find { |n| n['name'] == 'compile' }
+ rspec_node = nodes.find { |n| n['name'] == 'rspec' }
+
+ # compile: 3 successful builds
+ expect(compile_node['statistics']['successCount'].to_i).to eq(3)
+ expect(compile_node['statistics']['failedCount'].to_i).to eq(0)
+ expect(compile_node['statistics']['totalCount'].to_i).to eq(3)
+
+ # rspec: 2 failed, 1 canceled (other)
+ expect(rspec_node['statistics']['successCount'].to_i).to eq(0)
+ expect(rspec_node['statistics']['failedCount'].to_i).to eq(2)
+ expect(rspec_node['statistics']['totalCount'].to_i).to eq(3)
+ end
+ end
+
+ describe 'combining counts with rates using status parameter' do
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ success_count: [:count, { status: :SUCCESS }],
+ failed_count: [:count, { status: :FAILED }],
+ success_rate: [:rate, { status: :SUCCESS }],
+ failed_rate: [:rate, { status: :FAILED }],
+ total_count: [:count]
+ }
+ }
+ })
+ end
+
+ it 'returns both counts and rates' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+
+ compile_node = nodes.find { |n| n['name'] == 'compile' }
+ statistics = compile_node['statistics']
+
+ # Verify counts
+ expect(statistics['successCount'].to_i).to eq(3)
+ expect(statistics['failedCount'].to_i).to eq(0)
+ expect(statistics['totalCount'].to_i).to eq(3)
+
+ # Verify rates
+ expect(statistics['successRate']).to eq(100.0)
+ expect(statistics['failedRate']).to eq(0.0)
+ end
+
+ it 'allows frontend to calculate custom rates from counts' do
+ post_graphql(query, current_user: current_user)
+
+ rspec_node = nodes.find { |n| n['name'] == 'rspec' }
+ statistics = rspec_node['statistics']
+
+ calculated_failed_rate = ((statistics['failedCount'].to_f / statistics['totalCount'].to_f) * 100).round(2)
+ expect(calculated_failed_rate).to be_within(0.01).of(statistics['failedRate'])
+ end
+ end
+
+ describe 'durationStatistics field' do
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ mean: nil,
+ p50: nil,
+ p95: nil
+ }
+ }
+ }
+ })
+ end
+
+ it 'returns duration statistics as a nested object' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ expect(nodes).to be_an(Array)
+ expect(nodes).to all(have_key('statistics'))
+
+ nodes.each do |node|
+ stats = node['statistics']['durationStatistics']
+ expect(stats).to have_key('mean')
+ expect(stats).to have_key('p50')
+ expect(stats).to have_key('p95')
+ end
+ end
+
+ it 'returns correct duration values' do
+ post_graphql(query, current_user: current_user)
+
+ compile_node = nodes.find { |n| n['name'] == 'compile' }
+ stats = compile_node['statistics']['durationStatistics']
+
+ # compile jobs have 1 second duration
+ expect(stats['mean']).to eq(1.0)
+ expect(stats['p50']).to eq(1.0)
+ expect(stats['p95']).to eq(1.0)
+ end
+ end
+
+ describe 'p50 duration field' do
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ p50: nil
+ }
+ }
+ }
+ })
+ end
+
+ it 'returns p50 (median) duration' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ nodes.each do |node|
+ expect(node['statistics']['durationStatistics']).to have_key('p50')
+ end
+ end
+ end
+
+ describe 'sorting by p50 duration' do
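+ # sorting by an aggregation requires the resolver to infer that aggregation from the sort argument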
+ let(:job_analytics_args) { { sort: :P50_DURATION_ASC } }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ p50: nil
+ }
+ }
+ }
+ })
+ end
+
+ it 'sorts by p50 duration ascending' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ durations = nodes.map { |n| n.dig('statistics', 'durationStatistics', 'p50') }
+ expect(durations).to eq(durations.compact.sort)
+ end
+ end
+
+ describe 'all percentile values' do
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ p50: nil,
+ p75: nil,
+ p90: nil,
+ p95: nil,
+ p99: nil
+ }
+ }
+ }
+ })
+ end
+
+ it 'returns all percentile values' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ nodes.each do |node|
+ stats = node['statistics']['durationStatistics']
+ expect(stats).to have_key('p50')
+ expect(stats).to have_key('p75')
+ expect(stats).to have_key('p90')
+ expect(stats).to have_key('p95')
+ expect(stats).to have_key('p99')
+ end
+ end
+ end
+
+ describe 'sorting by different percentiles' do
+ context 'when sorted by p75 duration' do
+ let(:job_analytics_args) { { sort: :P75_DURATION_DESC } }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ p75: nil
+ }
+ }
+ }
+ })
+ end
+
+ it 'sorts by p75 duration descending' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ durations = nodes.map { |n| n.dig('statistics', 'durationStatistics', 'p75') }
+ expect(durations).to eq(durations.compact.sort.reverse)
+ end
+ end
+
+ context 'when sorted by p99 duration' do
+ let(:job_analytics_args) { { sort: :P99_DURATION_ASC } }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ p99: nil
+ }
+ }
+ }
+ })
+ end
+
+ it 'sorts by p99 duration ascending' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ durations = nodes.map { |n| n.dig('statistics', 'durationStatistics', 'p99') }
+ expect(durations).to eq(durations.compact.sort)
+ end
+ end
+ end
+
+ describe 'when no duration aggregations are requested' do
+ it 'does not include durationStatistics when only status stats are requested' do
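+ # durationStatistics is not selected, so the resolver should not add any duration aggregations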
+ query_without_duration = graphql_query_for(
+ :project,
+ { fullPath: project.full_path },
+ query_graphql_field(
+ :jobAnalytics,
+ {},
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ total_count: [:count]
+ }
+ }
+ })
+ )
+ )
+
+ post_graphql(query_without_duration, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ expect(nodes).to all(have_key('statistics'))
+ nodes.each do |node|
+ expect(node['statistics'].keys).not_to include('durationStatistics')
+ end
+ end
+ end
end
context 'when project is private' do
let_it_be_with_reload(:private_project) { create(:project, :private) }
let(:project) { private_project }
- let(:job_analytics_args) do
- {
- select_fields: [:NAME],
- aggregations: [:MEAN_DURATION_IN_SECONDS]
- }
+ let(:job_analytics_fields) do
+ hash_to_graphql_fields({
+ nodes: {
+ name: nil,
+ statistics: {
+ duration_statistics: {
+ mean: nil
+ }
+ }
+ }
+ })
end
context 'when user is not a member' do
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 3e8c2061a9507fb05c8a7a2de7f0f8b80d8e5ae4..d7d0bc03b6881a175bd40bb354596b64c0a221b1 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -921,6 +921,55 @@ def calculate_query_complexity(query_string)
GraphQL::Analysis::AST.analyze_query(query, [analyzer]).first
end
+ # Helper to convert a Ruby hash structure into a GraphQL field selection string
+ #
+ # Supports:
+ # - Simple fields: { name: nil } (true also works)
+ # - Nested objects: { statistics: { mean: nil, p95: nil } }
+ # - Field aliases with args: { success_rate: [:rate, { status: :SUCCESS }] }
+ # - Field aliases without args: { total_count: [:count] }
+ # - Fields with args (no alias): { rate: { status: :SUCCESS } }
+ #
+ # Example:
+ # {
+ # nodes: {
+ # name: nil,
+ # statistics: {
+ # duration_statistics: {
+ # mean: nil,
+ # p95: nil
+ # },
+ # success_rate: [:rate, { status: :SUCCESS }],
+ # total_count: [:count]
+ # }
+ # }
+ # }
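+ #
+ # Produces (roughly):
+ #   "nodes { name statistics { durationStatistics { mean p95 } successRate: rate(status: SUCCESS) totalCount: count } }"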
+ def hash_to_graphql_fields(hash)
+ hash.map do |key, value|
+ case value
+ when NilClass, TrueClass
+ # Simple field
+ GraphqlHelpers.fieldnamerize(key)
+ when Array
+ # Field alias: [:actual_field] or [:actual_field, {args}]
+ actual_field, args = value
+ args ||= {}
+ "#{GraphqlHelpers.fieldnamerize(key)}: #{field_with_params(actual_field, args)}"
+ when Hash
+ # All-scalar values are treated as field arguments; anything else is a nested object
+ if value.values.all? { |v| v.is_a?(Symbol) || v.is_a?(String) }
+ # Field with arguments (no alias)
+ field_with_params(key, value)
+ else
+ # Nested object
+ "#{GraphqlHelpers.fieldnamerize(key)} { #{hash_to_graphql_fields(value)} }"
+ end
+ else
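+ # Fallback: any other value is rendered as a bare field name (the key is ignored)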
+ GraphqlHelpers.fieldnamerize(value)
+ end
+ end.join(' ')
+ end
+
private
def to_base_field(name_or_field, object_type)