diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 72d4097a945a105446ae761b152c9cacd8b7cf3c..7cca53031a15ec25aa4b89b02ac08ccc6a22f2de 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -690,6 +690,10 @@ def ref_exists?
       false
     end
 
+    def resource_parent
+      project
+    end
+
     def triggered_pipelines_with_preloads
       triggered_pipelines.preload(:source_job)
     end
diff --git a/app/serializers/ci/pipeline_serializer.rb b/app/serializers/ci/pipeline_serializer.rb
new file mode 100644
index 0000000000000000000000000000000000000000..8dd4b894791853a7220702ef46ee8897324831e4
--- /dev/null
+++ b/app/serializers/ci/pipeline_serializer.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+module Ci
+  class PipelineSerializer < BaseSerializer
+    entity Ci::PipelineEntity
+  end
+end
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index 276b0e0c2b4d5ddac28ded2896849fbd5e1073e3..7c92b81e5f9ab5bfa7933fa3b4131b3cf42cd893 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -39,6 +39,8 @@
   - 1
 - - ai_duo_workflows_update_workflow_status_event
   - 1
+- - ai_flow_triggers_trigger_pipeline_flows
+  - 1
 - - ai_knowledge_graph_indexing_task
   - 1
 - - ai_repository_xray_scan_dependencies
diff --git a/ee/app/models/ai/ai_resource/ci/pipeline.rb b/ee/app/models/ai/ai_resource/ci/pipeline.rb
new file mode 100644
index 0000000000000000000000000000000000000000..a795ea81369ce6410d33e548c71ddde1bbae0115
--- /dev/null
+++ b/ee/app/models/ai/ai_resource/ci/pipeline.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Ai
+  module AiResource
+    module Ci
+      class Pipeline < Ai::AiResource::BaseAiResource
+        include Ai::AiResource::Concerns::Noteable
+
+        CHAT_QUESTIONS = [
+          "What errors or warnings appeared in this pipeline?",
+          "What caused any slow or failing steps in this pipeline?",
+          "Were all dependencies available in this pipeline?",
+          "What was each job's final status?"
+        ].freeze
+
+        CHAT_UNIT_PRIMITIVE = :ask_build
+
+        def serialize_for_ai(content_limit: default_content_limit)
+          ::Ci::PipelineSerializer # rubocop: disable CodeReuse/Serializer -- existing serializer
+            .new(current_user: current_user, project: resource.project)
+            .represent(resource, {
+              user: current_user,
+              content_limit: content_limit,
+              serializer: 'ai',
+              resource: self
+            })
+        end
+
+        def current_page_type
+          "pipeline"
+        end
+
+        def current_page_params
+          {
+            type: current_page_type
+          }
+        end
+      end
+    end
+  end
+end
diff --git a/ee/app/models/ai/flow_trigger.rb b/ee/app/models/ai/flow_trigger.rb
index 1f283229c69982f1f0b6ce6d83e6ee9edbad76a9..3c4cbb8d4b8978cb948e51a5c4a51be3271b8119 100644
--- a/ee/app/models/ai/flow_trigger.rb
+++ b/ee/app/models/ai/flow_trigger.rb
@@ -6,7 +6,9 @@ class FlowTrigger < ApplicationRecord
 
     EVENT_TYPES = {
       mention: 0,
-      assign: 1
+      assign: 1,
+      pipeline_failure: 2,
+      pipeline_success: 3
     }.freeze
 
     belongs_to :project
diff --git a/ee/app/services/ai/flow_triggers/run_service.rb b/ee/app/services/ai/flow_triggers/run_service.rb
index c00f3a006d806d9fd2513f87840fb972804eab2e..86422835b2210a6e4e90071964126d8da5c4bf1f 100644
--- a/ee/app/services/ai/flow_triggers/run_service.rb
+++ b/ee/app/services/ai/flow_triggers/run_service.rb
@@ -3,12 +3,13 @@
 module Ai
   module FlowTriggers
     class RunService
-      def initialize(project:, current_user:, resource:, flow_trigger:)
+      def initialize(project:, current_user:, resource:, flow_trigger:, create_note: true)
         @project = project
         @current_user = current_user
         @resource = resource
         @flow_trigger = flow_trigger
         @flow_trigger_user = flow_trigger.user
+        @create_note = create_note
 
         link_composite_identity! if can_use_composite_identity?
       end
@@ -30,18 +31,23 @@ def execute(params)
         workflow = wf_create_result[:workflow]
         params[:flow_id] = workflow.id
 
-        note_service = ::Ai::FlowTriggers::CreateNoteService.new(
-          project: project, resource: resource, author: flow_trigger_user, discussion: params[:discussion]
-        )
-        note_service.execute(params) do |updated_params|
-          run_workload(updated_params, workflow)
+        if create_note
+          note_service = ::Ai::FlowTriggers::CreateNoteService.new(
+            project: project, resource: resource, author: flow_trigger_user, discussion: params[:discussion]
+          )
+
+          note_service.execute(params) do |updated_params|
+            run_workload(updated_params, workflow)
+          end
+        else
+          run_workload(params, workflow)
         end
       end
 
       private
 
-      attr_reader :project, :current_user, :resource, :flow_trigger, :flow_trigger_user
+      attr_reader :project, :current_user, :resource, :flow_trigger, :flow_trigger_user, :create_note
 
       def run_workload(params, workflow)
        flow_definition = fetch_flow_definition
@@ -124,7 +130,13 @@ def build_variables(params)
 
       def branch_args
         args = { create_branch: true }
-        args[:source_branch] = resource.source_branch if resource.is_a?(MergeRequest)
+
+        if resource.is_a?(MergeRequest)
+          args[:source_branch] = resource.source_branch
+        elsif resource.is_a?(Ci::Pipeline)
+          args[:source_branch] = resource.sha
+        end
+
         args
       end
 
diff --git a/ee/app/workers/ai/flow_triggers/trigger_pipeline_flows_worker.rb b/ee/app/workers/ai/flow_triggers/trigger_pipeline_flows_worker.rb
new file mode 100644
index 0000000000000000000000000000000000000000..e0fd537a5b99cbe8c262afa5c4cd817d9eefdb55
--- /dev/null
+++ b/ee/app/workers/ai/flow_triggers/trigger_pipeline_flows_worker.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Ai
+  module FlowTriggers
+    # rubocop: disable Scalability/IdempotentWorker -- idempotency is provided by EventStore::Subscriber
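+    # Runs the project's AI flow triggers when a pipeline finishes with success or failure.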
+    class TriggerPipelineFlowsWorker
+      include Gitlab::EventStore::Subscriber
+
+      feature_category :agent_foundations
+      data_consistency :delayed
+
+      def handle_event(event)
+        pipeline = Ci::Pipeline.find_by_id(event.data[:pipeline_id])
+        return unless pipeline
+
+        # Skip pipelines created by Duo Workflow so flows do not trigger themselves
+        return if pipeline.source == "duo_workflow"
+
+        trigger_event =
+          case pipeline.status
+          when 'success' then :pipeline_success
+          when 'failed' then :pipeline_failure
+          end
+        return unless trigger_event
+
+        flow_triggers = pipeline.project.ai_flow_triggers.triggered_on(trigger_event)
+        return if flow_triggers.blank?
+
+        flow_triggers.find_each do |flow_trigger|
+          ::Ai::FlowTriggers::RunService.new(
+            project: pipeline.project,
+            current_user: pipeline.user,
+            resource: pipeline,
+            flow_trigger: flow_trigger,
+            create_note: false
+          ).execute({ input: "", event: trigger_event })
+        end
+      end
+    end
+    # rubocop: enable Scalability/IdempotentWorker
+  end
+end
diff --git a/ee/app/workers/all_queues.yml b/ee/app/workers/all_queues.yml
index 7c33f81058c1d57f2aa8c3c4745c54a92eb38a6e..a402a392a437c73e520a88dd9fa578b4f2aff551 100644
--- a/ee/app/workers/all_queues.yml
+++ b/ee/app/workers/all_queues.yml
@@ -1453,6 +1453,16 @@
   :idempotent: true
   :tags: []
   :queue_namespace:
+- :name: ai_flow_triggers_trigger_pipeline_flows
+  :worker_name: Ai::FlowTriggers::TriggerPipelineFlowsWorker
+  :feature_category: :agent_foundations
+  :has_external_dependencies: false
+  :urgency: :low
+  :resource_boundary: :unknown
+  :weight: 1
+  :idempotent: true
+  :tags: []
+  :queue_namespace:
 - :name: ai_knowledge_graph_indexing_task
   :worker_name: Ai::KnowledgeGraph::IndexingTaskWorker
   :feature_category: :knowledge_graph
diff --git a/ee/lib/ee/gitlab/event_store.rb b/ee/lib/ee/gitlab/event_store.rb
index fb07e03f9ddfaae5f1629f1022fefb5a9df9f390..44cef1077504ff97c1390e2dc365f3c9e83f2364 100644
--- a/ee/lib/ee/gitlab/event_store.rb
+++ b/ee/lib/ee/gitlab/event_store.rb
@@ -339,6 +339,7 @@ def subscribe_to_analytics_events(store)
        def subscribe_to_ci_events(store)
          store.subscribe ::Security::Scans::PurgeByJobIdWorker, to: ::Ci::JobArtifactsDeletedEvent
          store.subscribe ::Security::Scans::IngestReportsWorker, to: ::Ci::JobSecurityScanCompletedEvent
+         store.subscribe ::Ai::FlowTriggers::TriggerPipelineFlowsWorker, to: ::Ci::PipelineFinishedEvent
          store.subscribe ::Ai::DuoWorkflows::UpdateWorkflowStatusEventWorker, to: ::Ci::Workloads::WorkloadFinishedEvent
        end
 
diff --git a/ee/spec/models/ai/ai_resource/ci/pipeline_spec.rb b/ee/spec/models/ai/ai_resource/ci/pipeline_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..b5e2eb0ce1efee24873be475ebba8a70d96749d6
--- /dev/null
+++ b/ee/spec/models/ai/ai_resource/ci/pipeline_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ai::AiResource::Ci::Pipeline, feature_category: :duo_chat do
+  let_it_be(:project) { create(:project, :public, :repository) }
+  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+  let(:user) { create(:user) }
+  let(:content_limit) { 100_000 }
+
+  subject(:wrapped_pipeline) { described_class.new(user, pipeline) }
+
+  describe 'constants' do
+    it 'defines CHAT_QUESTIONS' do
+      expect(described_class::CHAT_QUESTIONS).to eq([
+        "What errors or warnings appeared in this pipeline?",
+        "What caused any slow or failing steps in this pipeline?",
+        "Were all dependencies available in this pipeline?",
+        "What was each job's final status?"
+      ])
+    end
+
+    it 'defines CHAT_UNIT_PRIMITIVE' do
+      expect(described_class::CHAT_UNIT_PRIMITIVE).to eq(:ask_build)
+    end
+  end
+
+  describe 'inheritance and includes' do
+    it 'inherits from BaseAiResource' do
+      expect(described_class.superclass).to eq(Ai::AiResource::BaseAiResource)
+    end
+
+    it 'includes Noteable concern' do
+      expect(described_class.included_modules).to include(Ai::AiResource::Concerns::Noteable)
+    end
+  end
+
+  describe '#serialize_for_ai' do
+    let(:expected_options) do
+      {
+        user: user,
+        content_limit: content_limit,
+        serializer: 'ai',
+        resource: wrapped_pipeline
+      }
+    end
+
+    it 'calls the pipeline serializer with correct parameters' do
+      serializer_instance = instance_double(Ci::PipelineSerializer)
+      allow(Ci::PipelineSerializer).to receive(:new)
+        .with(current_user: user, project: pipeline.project)
+        .and_return(serializer_instance)
+
+      expect(serializer_instance).to receive(:represent)
+        .with(pipeline, expected_options)
+
+      wrapped_pipeline.serialize_for_ai(content_limit: content_limit)
+    end
+
+    it 'uses default content limit when not specified' do
+      serializer_instance = instance_double(Ci::PipelineSerializer)
+      allow(Ci::PipelineSerializer).to receive(:new)
+        .with(current_user: user, project: pipeline.project)
+        .and_return(serializer_instance)
+
+      expect(serializer_instance).to receive(:represent)
+        .with(pipeline, hash_including(content_limit: 100_000))
+
+      wrapped_pipeline.serialize_for_ai
+    end
+
+    it 'passes the wrapped resource instance to serializer' do
+      serializer_instance = instance_double(Ci::PipelineSerializer)
+      allow(Ci::PipelineSerializer).to receive(:new)
+        .with(current_user: user, project: pipeline.project)
+        .and_return(serializer_instance)
+
+      expect(serializer_instance).to receive(:represent)
+        .with(pipeline, hash_including(resource: wrapped_pipeline))
+
+      wrapped_pipeline.serialize_for_ai(content_limit: content_limit)
+    end
+  end
+
+  describe '#current_page_type' do
+    it 'returns "pipeline"' do
+      expect(wrapped_pipeline.current_page_type).to eq('pipeline')
+    end
+  end
+
+  describe '#current_page_params' do
+    it 'returns params with type' do
+      expect(wrapped_pipeline.current_page_params).to eq({
+        type: 'pipeline'
+      })
+    end
+
+    it 'returns hash with correct keys' do
+      expect(wrapped_pipeline.current_page_params.keys).to eq([:type])
+    end
+  end
+
+  describe '#chat_questions' do
+    it 'returns the class CHAT_QUESTIONS constant' do
+      expect(wrapped_pipeline.chat_questions).to eq(described_class::CHAT_QUESTIONS)
+    end
+  end
+
+  describe '#chat_unit_primitive' do
+    it 'returns the class CHAT_UNIT_PRIMITIVE constant' do
+      expect(wrapped_pipeline.chat_unit_primitive).to eq(:ask_build)
+    end
+  end
+
+  describe 'inherited methods' do
+    it 'has access to resource' do
+      expect(wrapped_pipeline.resource).to eq(pipeline)
+    end
+
+    it 'has access to current_user' do
+      expect(wrapped_pipeline.current_user).to eq(user)
+    end
+
+    it 'has default_content_limit' do
+      expect(wrapped_pipeline.default_content_limit).to eq(100_000)
+    end
+  end
+end
diff --git a/ee/spec/services/ai/flow_triggers/run_service_spec.rb b/ee/spec/services/ai/flow_triggers/run_service_spec.rb
index 1064cbe343b3a37ffb66159d4f22a15a5a180d7f..c6b6ba2b93a0da353bc48e097a1bf06449203f4d 100644
--- a/ee/spec/services/ai/flow_triggers/run_service_spec.rb
+++ b/ee/spec/services/ai/flow_triggers/run_service_spec.rb
@@ -171,6 +171,29 @@
         end
       end
     end
+
+    it 'sets create_note to true by default' do
+      service_instance = described_class.new(
+        project: project,
+        current_user: current_user,
+        resource: resource,
+        flow_trigger: flow_trigger
+      )
+
+      expect(service_instance.instance_variable_get(:@create_note)).to be(true)
+    end
+
+    it 'sets create_note to false when specified' do
+      service_instance = described_class.new(
+        project: project,
+        current_user: current_user,
+        resource: resource,
+        flow_trigger: flow_trigger,
+        create_note: false
+      )
+
+      expect(service_instance.instance_variable_get(:@create_note)).to be(false)
+    end
   end
 
   describe '#execute' do
@@ -663,5 +686,110 @@
         end.to change { ::Ai::DuoWorkflows::Workflow.count }.by(1)
       end
     end
+
+    context 'when resource is a CI Pipeline' do
+      let_it_be(:pipeline) do
+        create(:ci_pipeline, project: project, sha: 'abc123def456')
+      end
+
+      let_it_be(:resource) { pipeline }
+
+      subject(:service) do
+        described_class.new(
+          project: project,
+          current_user: current_user,
+          resource: resource,
+          flow_trigger: flow_trigger,
+          create_note: false
+        )
+      end
+
+      it 'includes pipeline sha as source branch in branch args' do
+        expect(Ci::Workloads::RunWorkloadService).to receive(:new).with(
+          project: project,
+          current_user: service_account,
+          source: :duo_workflow,
+          workload_definition: an_instance_of(Ci::Workloads::WorkloadDefinition),
+          ci_variables_included: %w[API_KEY DATABASE_URL],
+          create_branch: true,
+          source_branch: 'abc123def456'
+        ).and_call_original
+
+        service.execute(params)
+      end
+    end
+
+    context 'with create_note parameter' do
+      before do
+        allow(service).to receive(:fetch_flow_definition).and_return(flow_definition)
+
+        token_service_double = instance_double(::Ai::ThirdPartyAgents::TokenService)
+        allow(::Ai::ThirdPartyAgents::TokenService).to receive(:new)
+          .with(current_user: current_user)
+          .and_return(token_service_double)
+        allow(token_service_double).to receive(:direct_access_token).and_return(mock_token_response)
+      end
+
+      context 'when create_note is true (default)' do
+        it 'creates notes via CreateNoteService' do
+          expect(Note.count).to eq(1)
+
+          response = service.execute(params)
+          expect(response).to be_success
+
+          expect(Note.count).to eq(2)
+          expect(Note.last.note).to include('✅ Agent has started. You can view the progress')
+        end
+
+        it 'calls CreateNoteService with correct parameters' do
+          note_service_double = instance_double(::Ai::FlowTriggers::CreateNoteService)
+          expect(::Ai::FlowTriggers::CreateNoteService).to receive(:new).with({
+            project: project,
+            resource: resource,
+            author: service_account,
+            discussion: params[:discussion]
+          }).and_return(note_service_double)
+
+          expect(note_service_double).to receive(:execute).with(params).and_yield(params)
+
+          service.execute(params)
+        end
+      end
+
+      context 'when create_note is false' do
+        subject(:service) do
+          described_class.new(
+            project: project,
+            current_user: current_user,
+            resource: resource,
+            flow_trigger: flow_trigger,
+            create_note: false
+          )
+        end
+
+        it 'does not create notes' do
+          expect(Note.count).to eq(1)
+
+          response = service.execute(params)
+          expect(response).to be_success
+
+          expect(Note.count).to eq(1)
+        end
+
+        it 'does not call CreateNoteService' do
+          expect(::Ai::FlowTriggers::CreateNoteService).not_to receive(:new)
+
+          service.execute(params)
+        end
+
+        it 'still creates workflow and workload' do
+          expect { service.execute(params) }.to change { ::Ai::DuoWorkflows::Workflow.count }.by(1)
+          expect { service.execute(params) }.to change { ::Ci::Workloads::Workload.count }.by(1)
+
+          response = service.execute(params)
+          expect(response).to be_success
+        end
+      end
+    end
   end
 end
diff --git a/ee/spec/workers/ai/flow_triggers/trigger_pipeline_flows_worker_spec.rb b/ee/spec/workers/ai/flow_triggers/trigger_pipeline_flows_worker_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..67fdd0ce73ef43a176d35bf84102f31915f5458c
--- /dev/null
+++ b/ee/spec/workers/ai/flow_triggers/trigger_pipeline_flows_worker_spec.rb
@@ -0,0 +1,188 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ai::FlowTriggers::TriggerPipelineFlowsWorker, feature_category: :agent_foundations do
+  subject(:worker) { described_class.new }
+
+  let_it_be_with_refind(:project) { create(:project) }
+  let_it_be_with_refind(:user) { create(:user, maintainer_of: project) }
+  let_it_be_with_refind(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+
+  let(:event) { Ci::PipelineFinishedEvent.new(data: { pipeline_id: pipeline.id, status: pipeline.status }) }
+
+  describe '#handle_event' do
+    context 'when pipeline does not exist' do
+      let(:event) { Ci::PipelineFinishedEvent.new(data: { pipeline_id: non_existing_record_id, status: 'success' }) }
+
+      it 'returns early without processing' do
+        expect(Ai::FlowTriggers::RunService).not_to receive(:new)
+
+        worker.handle_event(event)
+      end
+    end
+
+    context 'when pipeline is from duo_workflow source' do
+      before do
+        pipeline.update!(source: 'duo_workflow')
+      end
+
+      it 'returns early without processing' do
+        expect(Ai::FlowTriggers::RunService).not_to receive(:new)
+
+        worker.handle_event(event)
+      end
+    end
+
+    context 'when pipeline status is not success or failed' do
+      before do
+        pipeline.update!(status: 'running')
+      end
+
+      it 'returns early without processing' do
+        expect(Ai::FlowTriggers::RunService).not_to receive(:new)
+
+        worker.handle_event(event)
+      end
+    end
+
+    context 'when project has no flow triggers' do
+      before do
+        pipeline.update!(status: 'success')
+      end
+
+      it 'returns early without processing' do
+        expect(Ai::FlowTriggers::RunService).not_to receive(:new)
+
+        worker.handle_event(event)
+      end
+    end
+
+    context 'when pipeline is successful' do
+      let_it_be(:flow_trigger_success) do
+        create(:ai_flow_trigger, project: project, event_types: [Ai::FlowTrigger::EVENT_TYPES[:pipeline_success]])
+      end
+
+      let_it_be(:flow_trigger_failure) do
+        create(:ai_flow_trigger, project: project, event_types: [Ai::FlowTrigger::EVENT_TYPES[:pipeline_failure]])
+      end
+
+      before do
+        pipeline.update!(status: 'success')
+      end
+
+      it 'processes only pipeline_success flow triggers' do
+        expect(Ai::FlowTriggers::RunService).to receive(:new).with({
+          project: pipeline.project,
+          current_user: pipeline.user,
+          resource: pipeline,
+          flow_trigger: flow_trigger_success,
+          create_note: false
+        }).and_return(instance_double(Ai::FlowTriggers::RunService, execute: true))
+
+        expect(Ai::FlowTriggers::RunService).not_to receive(:new).with(
+          hash_including(flow_trigger: flow_trigger_failure)
+        )
+
+        worker.handle_event(event)
+      end
+
+      it 'calls execute with correct parameters' do
+        service_instance = instance_double(Ai::FlowTriggers::RunService)
+        allow(Ai::FlowTriggers::RunService).to receive(:new).and_return(service_instance)
+
+        expect(service_instance).to receive(:execute).with({ input: "", event: :pipeline_success })
+
+        worker.handle_event(event)
+      end
+    end
+
+    context 'when pipeline has failed' do
+      let_it_be(:flow_trigger_success) do
+        create(:ai_flow_trigger, project: project, event_types: [Ai::FlowTrigger::EVENT_TYPES[:pipeline_success]])
+      end
+
+      let_it_be(:flow_trigger_failure) do
+        create(:ai_flow_trigger, project: project, event_types: [Ai::FlowTrigger::EVENT_TYPES[:pipeline_failure]])
+      end
+
+      before do
+        pipeline.update!(status: 'failed')
+      end
+
+      it 'processes only pipeline_failure flow triggers' do
+        expect(Ai::FlowTriggers::RunService).to receive(:new).with({
+          project: pipeline.project,
+          current_user: pipeline.user,
+          resource: pipeline,
+          flow_trigger: flow_trigger_failure,
+          create_note: false
+        }).and_return(instance_double(Ai::FlowTriggers::RunService, execute: true))
+
+        expect(Ai::FlowTriggers::RunService).not_to receive(:new).with(
+          hash_including(flow_trigger: flow_trigger_success)
+        )
+
+        worker.handle_event(event)
+      end
+
+      it 'calls execute with correct parameters' do
+        service_instance = instance_double(Ai::FlowTriggers::RunService)
+        allow(Ai::FlowTriggers::RunService).to receive(:new).and_return(service_instance)
+
+        expect(service_instance).to receive(:execute).with({ input: "", event: :pipeline_failure })
+
+        worker.handle_event(event)
+      end
+    end
+
+    context 'with multiple flow triggers for the same event' do
+      let_it_be(:flow_trigger1) do
+        create(:ai_flow_trigger, project: project, event_types: [Ai::FlowTrigger::EVENT_TYPES[:pipeline_success]])
+      end
+
+      let_it_be(:flow_trigger2) do
+        create(:ai_flow_trigger, project: project, event_types: [Ai::FlowTrigger::EVENT_TYPES[:pipeline_success]])
+      end
+
+      let_it_be(:flow_trigger3) do
+        create(:ai_flow_trigger, project: project, event_types: [Ai::FlowTrigger::EVENT_TYPES[:pipeline_failure]])
+      end
+
+      before do
+        pipeline.update!(status: 'success')
+      end
+
+      it 'processes all matching flow triggers' do
+        service_instance = instance_double(Ai::FlowTriggers::RunService)
+        allow(Ai::FlowTriggers::RunService).to receive(:new).and_return(service_instance)
+
+        expect(Ai::FlowTriggers::RunService).to receive(:new).with(
+          hash_including(flow_trigger: flow_trigger1)
+        ).and_return(service_instance)
+
+        expect(Ai::FlowTriggers::RunService).to receive(:new).with(
+          hash_including(flow_trigger: flow_trigger2)
+        ).and_return(service_instance)
+
+        expect(Ai::FlowTriggers::RunService).not_to receive(:new).with(
+          hash_including(flow_trigger: flow_trigger3)
+        )
+
+        expect(service_instance).to receive(:execute).twice
+
+        worker.handle_event(event)
+      end
+    end
+  end
+
+  describe 'worker configuration' do
+    it 'includes Gitlab::EventStore::Subscriber' do
+      expect(described_class.ancestors).to include(Gitlab::EventStore::Subscriber)
+    end
+
+    it 'has correct feature category' do
+      expect(described_class.get_feature_category).to eq(:agent_foundations)
+    end
+  end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 06964240923f91af0d0de66d5213bf0f677d535d..892352124179bd5a07709b9afeb497af3211bd68 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -6964,4 +6964,12 @@ def add_bridge_dependant_dag_job
       end
     end
   end
+
+  describe '#resource_parent' do
+    it 'returns the associated project' do
+      pipeline = create(:ci_pipeline, project: project)
+
+      expect(pipeline.resource_parent).to eq(project)
+    end
+  end
 end
diff --git a/spec/serializers/ci/pipeline_serializer_spec.rb b/spec/serializers/ci/pipeline_serializer_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..9aed4a152a0e7d7c9e065bf337857b5b53b5f15c
--- /dev/null
+++ b/spec/serializers/ci/pipeline_serializer_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineSerializer, feature_category: :continuous_integration do
+  let_it_be(:project) { build_stubbed(:project) }
+  let_it_be(:user) { build_stubbed(:user) }
+
+  let(:serializer) do
+    described_class.new(current_user: user, project: project)
+  end
+
+  subject(:data) { serializer.represent(resource) }
+
+  describe '#represent' do
+    context 'when a single object is being serialized' do
+      let(:resource) { build_stubbed(:ci_pipeline, project: project, user: user) }
+
+      it 'serializes the pipeline object' do
+        expect(data).to include(
+          id: resource.id,
+          user: a_hash_including(id: user.id),
+          active: resource.active?,
+          source: resource.source
+        )
+      end
+
+      it 'includes expected keys' do
+        expect(data.keys).to include(:id, :iid, :user, :active, :source, :created_at, :updated_at, :path, :flags)
+      end
+
+      it 'includes pipeline path as string' do
+        expect(data[:path]).to be_a(String)
+        expect(data[:path]).to include("/pipelines/#{resource.id}")
+      end
+    end
+
+    context 'when multiple objects are being serialized' do
+      let(:resource) { build_stubbed_pair(:ci_pipeline, project: project, user: user) }
+
+      it 'serializes the array of pipelines' do
+        expect(data).to contain_exactly(
+          a_hash_including(id: resource.first.id),
+          a_hash_including(id: resource.last.id)
+        )
+      end
+
+      it 'includes pipeline paths for all pipelines' do
+        expect(data.first[:path]).to be_a(String)
+        expect(data.last[:path]).to be_a(String)
+        expect(data.first[:path]).to include("/pipelines/#{resource.first.id}")
+        expect(data.last[:path]).to include("/pipelines/#{resource.last.id}")
+      end
+    end
+
+    context 'with options' do
+      let(:resource) { build_stubbed(:ci_pipeline, project: project, user: user) }
+
+      context 'when coverage is disabled' do
+        subject(:data) { serializer.represent(resource, disable_coverage: true) }
+
+        it 'does not include coverage field' do
+          expect(data).not_to have_key(:coverage)
+        end
+      end
+
+      context 'when coverage is enabled by default' do
+        it 'includes coverage field even when nil' do
+          expect(data).to have_key(:coverage)
+        end
+      end
+    end
+
+    context 'with AI serializer mode' do
+      let(:resource) { build_stubbed(:ci_pipeline, project: project, user: user) }
+      let(:ai_resource) { instance_double(Ai::AiResource::Ci::Pipeline) }
+
+      subject(:data) do
+        serializer.represent(resource, {
+          serializer: 'ai',
+          resource: ai_resource,
+          content_limit: 50_000
+        })
+      end
+
+      it 'serializes the pipeline for AI usage' do
+        expect(data).to include(
+          id: resource.id,
+          user: a_hash_including(id: user.id),
+          source: resource.source
+        )
+      end
+    end
+  end
+end