From e729876bc52bb0e810c4e61d5d9967c27e858df8 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Mon, 22 Apr 2024 13:13:48 +0200 Subject: [PATCH 01/27] Perform chat requests via new endpoint Introducing /v2/chat/agent AI Gateway endpoint. We are going to assemble ReAct prompt on Gateway, so we're going to do it via a new endpoint. Changelog: added EE: true --- ...hat_react_prompt_on_gateway_experiment.yml | 9 + .../chain/agents/single_action_executor.rb | 195 ++++++++++++++++++ ee/lib/gitlab/llm/chain/answers/chat.rb | 49 +++++ .../gitlab/llm/chain/answers/streamed_json.rb | 48 +++++ .../gitlab/llm/chain/concerns/ai_dependent.rb | 7 +- .../llm/chain/parsers/single_action_parser.rb | 60 ++++++ .../gitlab/llm/chain/requests/ai_gateway.rb | 32 ++- ee/lib/gitlab/llm/completions/chat.rb | 27 ++- 8 files changed, 418 insertions(+), 9 deletions(-) create mode 100644 ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml create mode 100644 ee/lib/gitlab/llm/chain/agents/single_action_executor.rb create mode 100644 ee/lib/gitlab/llm/chain/answers/chat.rb create mode 100644 ee/lib/gitlab/llm/chain/answers/streamed_json.rb create mode 100644 ee/lib/gitlab/llm/chain/parsers/single_action_parser.rb diff --git a/ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml b/ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml new file mode 100644 index 00000000000000..b10a20742b89f2 --- /dev/null +++ b/ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml @@ -0,0 +1,9 @@ +--- +name: chat_react_prompt_on_gateway_experiment +feature_issue_url: +introduced_by_url: +rollout_issue_url: +milestone: '16.11' +group: group::duo chat +type: wip +default_enabled: false diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb new file mode 100644 index 00000000000000..46f8aa1895f5fe --- /dev/null +++ 
b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -0,0 +1,195 @@ +# frozen_string_literal: true + +module Gitlab + module Llm + module Chain + module Agents + class SingleActionExecutor + include Gitlab::Utils::StrongMemoize + include Concerns::AiDependent + include Langsmith::RunHelpers + + attr_reader :tools, :user_input, :context, :response_handler + attr_accessor :iterations + + MAX_ITERATIONS = 10 + RESPONSE_TYPE_TOOL = 'tool' + + # @param [String] user_input - a question from a user + # @param [Array] tools - an array of Tools defined in the tools module. + # @param [GitlabContext] context - Gitlab context containing useful context information + # @param [ResponseService] response_handler - Handles returning the response to the client + # @param [ResponseService] stream_response_handler - Handles streaming chunks to the client + def initialize(user_input:, tools:, context:, response_handler:, stream_response_handler: nil) + @user_input = user_input + @tools = tools + @context = context + @iterations = 0 + @logger = Gitlab::Llm::Logger.build + @response_handler = response_handler + @stream_response_handler = stream_response_handler + end + + def execute + @agent_scratchpad = [] + MAX_ITERATIONS.times do + step = {} + thoughts = execute_streamed_request + + answer = Answers::Chat.from_response(response_body: thoughts, tools: tools, context: context) + + return answer if answer.is_final? + + step[:thought] = answer.suggestions + step[:tool] = answer.tool + step[:tool_input] = user_input + + tool_class = answer.tool + + picked_tool_action(tool_class) + + tool = tool_class.new( + context: context, + options: { + input: user_input, + suggestions: answer.suggestions # was options[:agent_scratchpad] + }, + stream_response_handler: stream_response_handler + ) + + tool_answer = tool.execute + + return tool_answer if tool_answer.is_final? 
+ + step[:observation] = tool_answer.content.strip + @agent_scratchpad.push(step) + end + + Answer.default_final_answer(context: context) + rescue Net::ReadTimeout => error + Gitlab::ErrorTracking.track_exception(error) + Answer.error_answer( + context: context, + content: _("GitLab Duo didn't respond. Try again? If it fails again, your request might be too large.") + ) + rescue Gitlab::Llm::AiGateway::Client::ConnectionError => error + Gitlab::ErrorTracking.track_exception(error) + Answer.error_answer( + context: context, + content: _("GitLab Duo could not connect to the AI provider.") + ) + end + traceable :execute, name: 'Run ReAct' + + private + + def streamed_content(_content, chunk) + chunk[:content] + end + + def execute_streamed_request + request(&streamed_request_handler(Answers::StreamedJson.new)) + end + + attr_reader :logger, :stream_response_handler + + # This method should not be memoized because the input variables change over time + def prompt + { prompt: user_input, options: prompt_options } + end + + def options + @options ||= { + agent_scratchpad: @agent_scratchpad, + conversation: conversation, + current_resource: current_resource, + current_code: current_code, + hello_this_is_chat: true + } + end + + def picked_tool_action(tool_class) + logger.info(message: "Picked tool", tool: tool_class.to_s) + + response_handler.execute( + response: Gitlab::Llm::Chain::ToolResponseModifier.new(tool_class), + options: { role: ::Gitlab::Llm::AiMessage::ROLE_SYSTEM, + type: RESPONSE_TYPE_TOOL } + ) + + # We need to stream the response for clients that already migrated to use `ai_action` and no longer + # use `resource_id` as an identifier. Once streaming is enabled and all clients migrated, we can + # remove the `response_handler` call above. 
+ return unless stream_response_handler + + stream_response_handler.execute( + response: Gitlab::Llm::Chain::ToolResponseModifier.new(tool_class), + options: { + role: ::Gitlab::Llm::ChatMessage::ROLE_SYSTEM, + type: RESPONSE_TYPE_TOOL + } + ) + end + + def available_resources_names + tools.filter_map do |tool_class| + tool_class::Executor::RESOURCE_NAME.pluralize if tool_class::Executor::RESOURCE_NAME.present? + end.join(', ') + end + strong_memoize_attr :available_resources_names + + # agent_version is deprecated, Chat conversation doesn't have this param anymore + def last_conversation + ChatStorage.new(context.current_user, nil).last_conversation + end + strong_memoize_attr :last_conversation + + def conversation + # include only messages with successful response and reorder + # messages so each question is followed by its answer + by_request = last_conversation + .reject { |message| message.errors.present? } + .group_by(&:request_id) + .select { |_uuid, messages| messages.size > 1 } + + c = by_request.values.sort_by { |messages| messages.first.timestamp }.flatten + + return [] if c.blank? + + c = c.last(50).map do |message, _| + { role: message.role.to_sym, content: message.content } + end + + c.to_s + end + + def current_code + file_context = current_file_context + return provider_prompt_class.current_selection_prompt(file_context) if file_context + + blob = @context.extra_resource[:blob] + return "" unless blob + + provider_prompt_class.current_blob_prompt(blob) + end + + def current_file_context + return unless context.current_file[:selected_text].present? 
+ + context.current_file + end + + def prompt_options + options + end + + def current_resource + context.current_page_short_description + rescue ArgumentError + "" + end + end + end + end + end +end diff --git a/ee/lib/gitlab/llm/chain/answers/chat.rb b/ee/lib/gitlab/llm/chain/answers/chat.rb new file mode 100644 index 00000000000000..88137eb29d974a --- /dev/null +++ b/ee/lib/gitlab/llm/chain/answers/chat.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +module Gitlab + module Llm + module Chain + module Answers + class Chat + extend Langsmith::RunHelpers + + def self.from_response(response_body:, tools:, context:) + parser = Parsers::SingleActionParser.new(output: response_body) + parser.parse + + return Answer.final_answer(context: context, content: parser.final_answer) if parser.final_answer + + executor = nil + action = parser.action + action_input = parser.action_input + thought = parser.thought + content = "\nAction: #{action}\nAction Input: #{action_input}\n" + + if tools.present? 
+ tool = tools.find { |tool_class| tool_class::Executor::NAME == action } + executor = tool::Executor if tool + + return Answer.default_final_answer(context: context) unless tool + end + + logger.info_or_debug(context.current_user, message: "Answer", content: content) + + Answer.new( + status: :ok, + context: context, + content: content, + tool: executor, + suggestions: thought, + is_final: false + ) + end + traceable :from_response, name: 'Get answer from response', run_type: 'parser', class_method: true + + private_class_method def self.logger + Gitlab::Llm::Logger.build + end + end + end + end + end +end diff --git a/ee/lib/gitlab/llm/chain/answers/streamed_json.rb b/ee/lib/gitlab/llm/chain/answers/streamed_json.rb new file mode 100644 index 00000000000000..8b8ae32f7dd623 --- /dev/null +++ b/ee/lib/gitlab/llm/chain/answers/streamed_json.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +module Gitlab + module Llm + module Chain + module Answers + class StreamedJson < StreamedAnswer + def initialize + @final_answer_started = false + @full_message = '' + + super + end + + def next_chunk(content) + return if content.empty? + content = JSON.parse(content) # need to handle parsing error + + # If it already contains the final answer, we can return the content directly. + # There is then also no longer the need to build the full message. 
+ answer_chunk = final_answer_chunk(content) + return unless answer_chunk + return payload(answer_chunk) if final_answer_started + + @full_message += answer_chunk + + return unless final_answer_start(content) + + @final_answer_started = true + payload(answer_chunk) + end + + private + + attr_accessor :full_message, :final_answer_started + + def final_answer_start(content) + 'final_answer_delta' == content.dig('type') + end + + def final_answer_chunk(content) + content.dig('data', 'text') + end + end + end + end + end +end diff --git a/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb b/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb index a645bc9a1fcc66..38b6edcd0a4478 100644 --- a/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb +++ b/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb @@ -34,7 +34,7 @@ def streamed_request_handler(streamed_answer) if chunk stream_response_handler.execute( - response: Gitlab::Llm::Chain::StreamedResponseModifier.new(content, chunk_id: chunk[:id]), + response: Gitlab::Llm::Chain::StreamedResponseModifier.new(streamed_content(content, chunk), chunk_id: chunk[:id]), options: { chunk_id: chunk[:id] } ) end @@ -56,6 +56,11 @@ def provider_prompt_class def unit_primitive nil end + + #duo chat modification + def streamed_content(content, chunk) + content + end end end end diff --git a/ee/lib/gitlab/llm/chain/parsers/single_action_parser.rb b/ee/lib/gitlab/llm/chain/parsers/single_action_parser.rb new file mode 100644 index 00000000000000..d89e3900488cfc --- /dev/null +++ b/ee/lib/gitlab/llm/chain/parsers/single_action_parser.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module Gitlab + module Llm + module Chain + module Parsers + class SingleActionParser < OutputParser + attr_reader :action, :action_input, :thought, :final_answer + + def parse + return unless @output + + @parsed_thoughts = parse_json_objects + + return unless @parsed_thoughts.present? + + parse_final_answer + parse_action + end + + private + + def final_answer? 
+ @parsed_thoughts.first[:type] == 'final_answer_delta' + end + + def parse_final_answer + return unless final_answer? + + @final_answer = '' + + @parsed_thoughts.each do |t| + @final_answer += t[:data][:text] + end + + @final_answer + end + + def parse_action + response = @parsed_thoughts.first + + return unless response[:type] == 'action' + + @thought = response[:data][:thought] + @action = response[:data][:tool].camelcase + @action_input = response[:data][:tool_input] + end + + def parse_json_objects + json_strings = @output.split("\n") + + json_strings.map do |str| + Gitlab::Json.parse(str).with_indifferent_access + end + end + end + end + end + end +end diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb index 33b5dd56e93d49..aaf0c2cce1fcda 100644 --- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb +++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb @@ -15,6 +15,7 @@ class AiGateway < Base ENDPOINT = '/v1/chat/agent' BASE_ENDPOINT = '/v1/chat' + CHAT_V2_ENDPOINT = '/v2/chat/agent' DEFAULT_TYPE = 'prompt' DEFAULT_SOURCE = 'GitLab EE' TEMPERATURE = 0.1 @@ -33,7 +34,19 @@ def request(prompt, unit_primitive: nil) options = default_options.merge(prompt.fetch(:options, {})) return unless model_provider_valid?(options) - body = request_body(prompt: prompt[:prompt], options: options) + v2_chat_schema = Feature.enabled?(:chat_react_prompt_on_gateway_experiment, user) && options.delete(:hello_this_is_chat) + + body = if v2_chat_schema + request_body_chat_2(prompt: prompt[:prompt], options: options) + else + request_body(prompt: prompt[:prompt], options: options) + end + + endpoint = if v2_chat_schema + CHAT_V2_ENDPOINT + else + ENDPOINT + end response = ai_client.stream( endpoint: endpoint(unit_primitive), @@ -118,6 +131,23 @@ def model_params(options) end end + def request_body_chat_2(prompt:, options: {}) + { + prompt: prompt, + options: { + chat_history: "", + agent_scratchpad: { + agent_type: "react", + 
steps: options[:agent_scratchpad] + }, + context: { + type: "issue", + content: "string" + } + } + } + end + def payload_params(options) allowed_params = ALLOWED_PARAMS.fetch(provider(options)) params = options.slice(*allowed_params) diff --git a/ee/lib/gitlab/llm/completions/chat.rb b/ee/lib/gitlab/llm/completions/chat.rb index 066b770835586a..63e38acb8a3074 100644 --- a/ee/lib/gitlab/llm/completions/chat.rb +++ b/ee/lib/gitlab/llm/completions/chat.rb @@ -23,6 +23,9 @@ class Chat < Base ::Gitlab::Llm::Chain::Tools::ExplainVulnerability ].freeze + # @param [Gitlab::Llm::AiMessage] prompt_message - user question + # @param [NilClass] ai_prompt_class - not used for chat + # @param [Hash] options - additional context def initialize(prompt_message, ai_prompt_class, options = {}) super @@ -109,13 +112,23 @@ def agent_or_tool_response(response_handler) end def execute_with_tool_chosen_by_ai(response_handler, stream_response_handler) - Gitlab::Llm::Chain::Agents::ZeroShot::Executor.new( - user_input: prompt_message.content, - tools: tools, - context: context, - response_handler: response_handler, - stream_response_handler: stream_response_handler - ).execute + if Feature.enabled?(:chat_react_prompt_on_gateway_experiment, user) + Gitlab::Llm::Chain::Agents::SingleActionExecutor.new( + user_input: prompt_message.content, + tools: tools, + context: context, + response_handler: response_handler, + stream_response_handler: stream_response_handler + ).execute + else + Gitlab::Llm::Chain::Agents::ZeroShot::Executor.new( + user_input: prompt_message.content, + tools: tools, + context: context, + response_handler: response_handler, + stream_response_handler: stream_response_handler + ).execute + end end def execute_with_slash_command_tool(stream_response_handler) -- GitLab From 5868ec90d3ad3a208cec9bfa00549f8ab1fa798c Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Tue, 11 Jun 2024 18:04:38 +0200 Subject: [PATCH 02/27] Prepare feature flag info --- 
.../wip/chat_react_prompt_on_gateway_experiment.yml | 9 --------- .../feature_flags/wip/v2_chat_agent_integration.yml | 9 +++++++++ ee/lib/gitlab/llm/chain/requests/ai_gateway.rb | 2 +- ee/lib/gitlab/llm/completions/chat.rb | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) delete mode 100644 ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml create mode 100644 ee/config/feature_flags/wip/v2_chat_agent_integration.yml diff --git a/ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml b/ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml deleted file mode 100644 index b10a20742b89f2..00000000000000 --- a/ee/config/feature_flags/wip/chat_react_prompt_on_gateway_experiment.yml +++ /dev/null @@ -1,9 +0,0 @@ ---- -name: chat_react_prompt_on_gateway_experiment -feature_issue_url: -introduced_by_url: -rollout_issue_url: -milestone: '16.11' -group: group::duo chat -type: wip -default_enabled: false diff --git a/ee/config/feature_flags/wip/v2_chat_agent_integration.yml b/ee/config/feature_flags/wip/v2_chat_agent_integration.yml new file mode 100644 index 00000000000000..0d57d2d202cfbd --- /dev/null +++ b/ee/config/feature_flags/wip/v2_chat_agent_integration.yml @@ -0,0 +1,9 @@ +--- +name: v2_chat_agent_integration +feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/456258 +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150529 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/466910 +milestone: '16.11' +group: group::duo chat +type: wip +default_enabled: false diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb index aaf0c2cce1fcda..82421cbeb2908e 100644 --- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb +++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb @@ -34,7 +34,7 @@ def request(prompt, unit_primitive: nil) options = default_options.merge(prompt.fetch(:options, {})) return unless 
model_provider_valid?(options) - v2_chat_schema = Feature.enabled?(:chat_react_prompt_on_gateway_experiment, user) && options.delete(:hello_this_is_chat) + v2_chat_schema = Feature.enabled?(:v2_chat_agent_integration, user) && options.delete(:hello_this_is_chat) body = if v2_chat_schema request_body_chat_2(prompt: prompt[:prompt], options: options) diff --git a/ee/lib/gitlab/llm/completions/chat.rb b/ee/lib/gitlab/llm/completions/chat.rb index 63e38acb8a3074..0d98c7d7d1ec96 100644 --- a/ee/lib/gitlab/llm/completions/chat.rb +++ b/ee/lib/gitlab/llm/completions/chat.rb @@ -112,7 +112,7 @@ def agent_or_tool_response(response_handler) end def execute_with_tool_chosen_by_ai(response_handler, stream_response_handler) - if Feature.enabled?(:chat_react_prompt_on_gateway_experiment, user) + if Feature.enabled?(:v2_chat_agent_integration, user) Gitlab::Llm::Chain::Agents::SingleActionExecutor.new( user_input: prompt_message.content, tools: tools, -- GitLab From d1e97e8264395b28d36878f9a749db5f93ca8867 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Thu, 13 Jun 2024 12:01:22 +0200 Subject: [PATCH 03/27] Fix rubocop issues --- ee/lib/gitlab/llm/chain/answers/streamed_json.rb | 7 +++---- ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb | 7 ++++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/answers/streamed_json.rb b/ee/lib/gitlab/llm/chain/answers/streamed_json.rb index 8b8ae32f7dd623..36d5006b5af9b5 100644 --- a/ee/lib/gitlab/llm/chain/answers/streamed_json.rb +++ b/ee/lib/gitlab/llm/chain/answers/streamed_json.rb @@ -14,11 +14,10 @@ def initialize def next_chunk(content) return if content.empty? - content = JSON.parse(content) # need to handle parsing error - # If it already contains the final answer, we can return the content directly. - # There is then also no longer the need to build the full message. 
+ content = ::Gitlab::Json.parse(content) answer_chunk = final_answer_chunk(content) + return unless answer_chunk return payload(answer_chunk) if final_answer_started @@ -35,7 +34,7 @@ def next_chunk(content) attr_accessor :full_message, :final_answer_started def final_answer_start(content) - 'final_answer_delta' == content.dig('type') + 'final_answer_delta' == content['type'] end def final_answer_chunk(content) diff --git a/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb b/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb index 38b6edcd0a4478..92a436ce9782b4 100644 --- a/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb +++ b/ee/lib/gitlab/llm/chain/concerns/ai_dependent.rb @@ -34,7 +34,8 @@ def streamed_request_handler(streamed_answer) if chunk stream_response_handler.execute( - response: Gitlab::Llm::Chain::StreamedResponseModifier.new(streamed_content(content, chunk), chunk_id: chunk[:id]), + response: Gitlab::Llm::Chain::StreamedResponseModifier + .new(streamed_content(content, chunk), chunk_id: chunk[:id]), options: { chunk_id: chunk[:id] } ) end @@ -57,8 +58,8 @@ def unit_primitive nil end - #duo chat modification - def streamed_content(content, chunk) + # This method is modified in SingleActionExecutor for Duo Chat + def streamed_content(content, _chunk) content end end -- GitLab From 11f2e14f065d75ca46eb9439bfc33535e7be54fa Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Thu, 13 Jun 2024 12:05:31 +0200 Subject: [PATCH 04/27] Add new error code to the executor --- ee/lib/gitlab/llm/chain/agents/single_action_executor.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 46f8aa1895f5fe..76202a2cebe7af 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -70,7 +70,8 @@ def execute Gitlab::ErrorTracking.track_exception(error) 
Answer.error_answer( context: context, - content: _("GitLab Duo didn't respond. Try again? If it fails again, your request might be too large.") + content: _("I'm sorry, I couldn't respond in time. Please try again."), + error_code: 'A1000' ) rescue Gitlab::Llm::AiGateway::Client::ConnectionError => error Gitlab::ErrorTracking.track_exception(error) -- GitLab From 98690cb90ad251f591795f81381ab2eb9380ae3e Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Thu, 13 Jun 2024 17:48:03 +0200 Subject: [PATCH 05/27] Add request test --- .../llm/chain/requests/ai_gateway_spec.rb | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb index 4c2378deed66ba..25206bf99745db 100644 --- a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb @@ -165,5 +165,37 @@ it_behaves_like 'performing request to the AI Gateway' end + + context 'when request is sent for a new ReAct Duo Chat prompt' do + let(:endpoint) { described_class::CHAT_V2_ENDPOINT } + + let(:prompt) { { prompt: user_prompt, options: options } } + + let(:options) do + { + agent_scratchpad: [], + hello_this_is_chat: true + } + end + + let(:body) do + { + prompt: user_prompt, + options: { + chat_history: "", + agent_scratchpad: { + agent_type: "react", + steps: [] + }, + context: { + type: "issue", + content: "string" + } + } + } + end + + it_behaves_like 'performing request to the AI Gateway' + end end end -- GitLab From ccb1ccc7f885612740feb5e0a1d33a709e418873 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Tue, 18 Jun 2024 12:48:11 +0200 Subject: [PATCH 06/27] Add parser tests --- .../parsers/single_action_parser_spec.rb | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 ee/spec/lib/gitlab/llm/chain/parsers/single_action_parser_spec.rb diff --git 
a/ee/spec/lib/gitlab/llm/chain/parsers/single_action_parser_spec.rb b/ee/spec/lib/gitlab/llm/chain/parsers/single_action_parser_spec.rb new file mode 100644 index 00000000000000..526e5afd1ba128 --- /dev/null +++ b/ee/spec/lib/gitlab/llm/chain/parsers/single_action_parser_spec.rb @@ -0,0 +1,81 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Llm::Chain::Parsers::SingleActionParser, feature_category: :duo_chat do + describe "#parse" do + let(:parser) { described_class.new(output: output) } + let(:output) { chunks.map(&:to_json).join("\n") } + + before do + parser.parse + end + + context "with final answer" do + let(:chunks) do + [ + { + type: "final_answer_delta", + data: { + thought: "Thought: I don't need any specific GitLab resources to answer this.", + text: "To" + } + }, + { type: "final_answer_delta", data: { thought: "", text: " perform" } }, + { type: "final_answer_delta", data: { thought: "", text: " a" } }, + { type: "final_answer_delta", data: { thought: "", text: " Git" } }, + { type: "final_answer_delta", data: { thought: "", text: " re" } }, + { type: "final_answer_delta", data: { thought: "", text: "base" } }, + { type: "final_answer_delta", data: { thought: "", text: "," } } + ] + end + + it "returns only the final answer" do + expect(parser.action).to be_nil + expect(parser.action_input).to be_nil + expect(parser.thought).to be_nil + expect(parser.final_answer).to eq("To perform a Git rebase,") + end + end + + context "with chosen action" do + let(:chunks) do + [ + { + type: "action", + data: { + thought: "Thought: I need to retrieve the issue details using the \"issue_reader\" tool.", + tool: "issue_reader", + tool_input: "What is the title of this issue?" 
+ } + } + ] + end + + it "returns the action" do + expect(parser.action).to eq("IssueReader") + expect(parser.action_input).to eq("What is the title of this issue?") + expect(parser.thought).to eq("Thought: I need to retrieve the issue details using the \"issue_reader\" tool.") + expect(parser.final_answer).to be_nil + end + end + + context "with no output" do + let(:output) { nil } + + it "returns nil" do + expect(parser.action).to be_nil + expect(parser.final_answer).to be_nil + end + end + + context "with empty output" do + let(:output) { "" } + + it "returns nil" do + expect(parser.action).to be_nil + expect(parser.final_answer).to be_nil + end + end + end +end -- GitLab From 279057d14879f569d5608724c1ba9d70e1256c93 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Tue, 18 Jun 2024 17:33:03 +0200 Subject: [PATCH 07/27] Add tests for the streamed answer adapter --- .../llm/chain/answers/streamed_json_spec.rb | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 ee/spec/lib/gitlab/llm/chain/answers/streamed_json_spec.rb diff --git a/ee/spec/lib/gitlab/llm/chain/answers/streamed_json_spec.rb b/ee/spec/lib/gitlab/llm/chain/answers/streamed_json_spec.rb new file mode 100644 index 00000000000000..7dfa14bab38575 --- /dev/null +++ b/ee/spec/lib/gitlab/llm/chain/answers/streamed_json_spec.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Llm::Chain::Answers::StreamedJson, feature_category: :duo_chat do + describe "#next_chunk" do + subject { described_class.new.next_chunk(chunk) } + + context "when stream is empty" do + let(:chunk) { "" } + + it 'returns nil' do + is_expected.to be_nil + end + end + + context "when stream does not contain the final answer" do + let(:chunk) do + { + type: "action", + data: { + thought: "Thought: I need to retrieve the issue content using the \"issue_reader\" tool.", + tool: "issue_reader", + tool_input: "what is the title of this issue" + } + }.to_json + 
end + + it 'returns nil' do + is_expected.to be_nil + end + end + + context "when streaming beginning of the answer" do + let(:chunk) do + { + type: "final_answer_delta", + data: { + thought: "Thought: I should provide a direct response.", + text: "I" + } + }.to_json + end + + it 'returns stream payload' do + is_expected.to eq({ id: 1, content: "I" }) + end + end + end +end -- GitLab From 78e8f00b644ca65add7f051525ba62c178037f2d Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Tue, 18 Jun 2024 18:29:33 +0200 Subject: [PATCH 08/27] Consolidate usage of Answer --- .../chain/agents/single_action_executor.rb | 9 +++- ee/lib/gitlab/llm/chain/answer.rb | 4 +- ee/lib/gitlab/llm/chain/answers/chat.rb | 49 ------------------- ee/spec/lib/gitlab/llm/chain/answer_spec.rb | 27 ++++++++++ 4 files changed, 36 insertions(+), 53 deletions(-) delete mode 100644 ee/lib/gitlab/llm/chain/answers/chat.rb diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 76202a2cebe7af..99111c7428af5a 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -36,7 +36,12 @@ def execute step = {} thoughts = execute_streamed_request - answer = Answers::Chat.from_response(response_body: thoughts, tools: tools, context: context) + answer = Answer.from_response( + response_body: thoughts, + tools: tools, + context: context, + parser_klass: Parsers::SingleActionParser + ) return answer if answer.is_final? 
@@ -52,7 +57,7 @@ def execute context: context, options: { input: user_input, - suggestions: answer.suggestions # was options[:agent_scratchpad] + suggestions: answer.suggestions }, stream_response_handler: stream_response_handler ) diff --git a/ee/lib/gitlab/llm/chain/answer.rb b/ee/lib/gitlab/llm/chain/answer.rb index 10c53e3a6e4ad1..e1ee0d86949119 100644 --- a/ee/lib/gitlab/llm/chain/answer.rb +++ b/ee/lib/gitlab/llm/chain/answer.rb @@ -9,8 +9,8 @@ class Answer attr_accessor :status, :content, :context, :tool, :suggestions, :is_final, :extras, :error_code alias_method :is_final?, :is_final - def self.from_response(response_body:, tools:, context:) - parser = Parsers::ChainOfThoughtParser.new(output: response_body) + def self.from_response(response_body:, tools:, context:, parser_klass: Parsers::ChainOfThoughtParser) + parser = parser_klass.new(output: response_body) parser.parse return final_answer(context: context, content: parser.final_answer) if parser.final_answer diff --git a/ee/lib/gitlab/llm/chain/answers/chat.rb b/ee/lib/gitlab/llm/chain/answers/chat.rb deleted file mode 100644 index 88137eb29d974a..00000000000000 --- a/ee/lib/gitlab/llm/chain/answers/chat.rb +++ /dev/null @@ -1,49 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Llm - module Chain - module Answers - class Chat - extend Langsmith::RunHelpers - - def self.from_response(response_body:, tools:, context:) - parser = Parsers::SingleActionParser.new(output: response_body) - parser.parse - - return Answer.final_answer(context: context, content: parser.final_answer) if parser.final_answer - - executor = nil - action = parser.action - action_input = parser.action_input - thought = parser.thought - content = "\nAction: #{action}\nAction Input: #{action_input}\n" - - if tools.present? 
- tool = tools.find { |tool_class| tool_class::Executor::NAME == action } - executor = tool::Executor if tool - - return Answer.default_final_answer(context: context) unless tool - end - - logger.info_or_debug(context.current_user, message: "Answer", content: content) - - Answer.new( - status: :ok, - context: context, - content: content, - tool: executor, - suggestions: thought, - is_final: false - ) - end - traceable :from_response, name: 'Get answer from response', run_type: 'parser', class_method: true - - private_class_method def self.logger - Gitlab::Llm::Logger.build - end - end - end - end - end -end diff --git a/ee/spec/lib/gitlab/llm/chain/answer_spec.rb b/ee/spec/lib/gitlab/llm/chain/answer_spec.rb index f758247c2475b3..845c0b8e2469a4 100644 --- a/ee/spec/lib/gitlab/llm/chain/answer_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/answer_spec.rb @@ -100,6 +100,33 @@ expect(answer.content).to eq(input) end end + + context 'with different parser' do + subject(:answer) do + described_class.from_response( + response_body: input, + tools: tools, + context: context, + parser_klass: Gitlab::Llm::Chain::Parsers::SingleActionParser + ) + end + + let(:input) do + { + type: "action", + data: { + thought: "Thought: I need to retrieve the issue content using the \"issue_reader\" tool.", + tool: "issue_reader", + tool_input: "what is the title of this issue" + } + }.to_json + end + + it 'returns intermediate answer with parsed values and a tool' do + expect(answer.is_final?).to eq(false) + expect(answer.tool::NAME).to eq('IssueReader') + end + end end describe '.final_answer' do -- GitLab From 44cf4a315e171fc6d7179c5798f63255124fe0df Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 19 Jun 2024 12:18:31 +0200 Subject: [PATCH 09/27] Rename agent param --- ee/lib/gitlab/llm/chain/agents/single_action_executor.rb | 2 +- ee/lib/gitlab/llm/chain/requests/ai_gateway.rb | 2 +- ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb | 2 +- 3 files changed, 3 insertions(+), 
3 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 99111c7428af5a..68f485a396f3bf 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -110,7 +110,7 @@ def options conversation: conversation, current_resource: current_resource, current_code: current_code, - hello_this_is_chat: true + single_action_agent: true } end diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb index 82421cbeb2908e..cc817da904a5ec 100644 --- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb +++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb @@ -34,7 +34,7 @@ def request(prompt, unit_primitive: nil) options = default_options.merge(prompt.fetch(:options, {})) return unless model_provider_valid?(options) - v2_chat_schema = Feature.enabled?(:v2_chat_agent_integration, user) && options.delete(:hello_this_is_chat) + v2_chat_schema = Feature.enabled?(:v2_chat_agent_integration, user) && options.delete(:single_action_agent) body = if v2_chat_schema request_body_chat_2(prompt: prompt[:prompt], options: options) diff --git a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb index 25206bf99745db..8a2492733ef48e 100644 --- a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb @@ -174,7 +174,7 @@ let(:options) do { agent_scratchpad: [], - hello_this_is_chat: true + single_action_agent: true } end -- GitLab From 920dc1e1b4864c9a1d7e1603763bbb9b1787dd38 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 19 Jun 2024 14:23:53 +0200 Subject: [PATCH 10/27] Start adding tests for new agent --- .../chain/agents/single_action_executor.rb | 23 +----- ee/spec/factories/llm/chain/answers.rb | 36 ++++++++++ 
.../agents/single_action_executor_spec.rb | 72 +++++++++++++++++++ 3 files changed, 109 insertions(+), 22 deletions(-) create mode 100644 ee/spec/factories/llm/chain/answers.rb create mode 100644 ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 68f485a396f3bf..aa25d0f870a797 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -104,12 +104,11 @@ def prompt { prompt: user_input, options: prompt_options } end - def options + def prompt_options @options ||= { agent_scratchpad: @agent_scratchpad, conversation: conversation, current_resource: current_resource, - current_code: current_code, single_action_agent: true } end @@ -169,26 +168,6 @@ def conversation c.to_s end - def current_code - file_context = current_file_context - return provider_prompt_class.current_selection_prompt(file_context) if file_context - - blob = @context.extra_resource[:blob] - return "" unless blob - - provider_prompt_class.current_blob_prompt(blob) - end - - def current_file_context - return unless context.current_file[:selected_text].present? 
- - context.current_file - end - - def prompt_options - options - end - def current_resource context.current_page_short_description rescue ArgumentError diff --git a/ee/spec/factories/llm/chain/answers.rb b/ee/spec/factories/llm/chain/answers.rb new file mode 100644 index 00000000000000..3b22dde1593622 --- /dev/null +++ b/ee/spec/factories/llm/chain/answers.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :answer, class: '::Gitlab::Llm::Chain::Answer' do + status { :ok } + is_final { false } + gitlab_context { 'context' } + content { 'content' } + tool { nil } + suggestion { nil } + extras { nil } + + trait :final do + is_final { true } + end + + trait :tool do + tool { Gitlab::Llm::Chain::Tools::IssueReader } + suggestion { 'suggestion' } + end + + initialize_with do + new( + status: status, + context: gitlab_context, + content: content, + tool: tool, + suggestions: suggestion, + is_final: is_final, + extras: extras + ) + end + + skip_create + end +end diff --git a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb new file mode 100644 index 00000000000000..26400289a14224 --- /dev/null +++ b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Llm::Chain::Agents::SingleActionExecutor, feature_category: :duo_chat do + describe "#execute" do + subject(:answer) do + described_class.new( + user_input: user_input, + tools: tools, + context: context, + response_handler: response_service_double, + stream_response_handler: stream_response_service_double + ).execute + end + + let_it_be(:user) { build_stubbed(:user) } + + let(:user_input) { 'What is the title of this issue?' 
} + let(:tools) { [Gitlab::Llm::Chain::Tools::IssueReader] } + let(:tool_double) { instance_double(Gitlab::Llm::Chain::Tools::IssueReader::Executor) } + let(:response_service_double) { instance_double(::Gitlab::Llm::ResponseService) } + let(:stream_response_service_double) { instance_double(::Gitlab::Llm::ResponseService) } + + let(:ai_request_double) { instance_double(Gitlab::Llm::Chain::Requests::AiGateway) } + let(:resource) { create(:issue) } + + let(:context) do + Gitlab::Llm::Chain::GitlabContext.new( + current_user: user, container: nil, resource: resource, ai_request: ai_request_double, + extra_resource: nil, current_file: nil, agent_version: nil + ) + end + + let(:response_double) do + "{\"type\":\"final_answer_delta\",\"data\":{\"thought\":\"Thought: direct answer.\",\"text\":\"To\"}}" + end + + before do + allow(context).to receive(:ai_request).and_return(ai_request_double) + allow(ai_request_double).to receive(:request).and_return(response_double) + allow(::Gitlab::Llm::Chain::Answer).to receive(:from_response).and_return(llm_answer) + end + + context "when answer is final" do + let(:llm_answer) { create(:answer, :final, context: context) } + + it "returns final answer from LLM" do + expect(answer.is_final?).to be_truthy + expect(answer.content).to include("content") + end + end + + context "when tool answer if final" do + let(:llm_answer) { create(:answer, :tool, tool: Gitlab::Llm::Chain::Tools::IssueReader::Executor) } + let(:tool_answer) { create(:answer, :final, content: 'tool answer') } + + before do + allow_next_instance_of(Gitlab::Llm::Chain::Tools::IssueReader::Executor) do |issue_tool| + allow(issue_tool).to receive(:execute).and_return(tool_answer) + end + end + + it "returns tool answer" do + expect(response_service_double).to receive(:execute) + expect(stream_response_service_double).to receive(:execute) + expect(answer.is_final?).to be_truthy + expect(answer.content).to include("tool answer") + end + end + end +end -- GitLab From 
f6f0ebf119f832e54b7c7220b1d3d5ea1ebd1478 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 19 Jun 2024 15:48:10 +0200 Subject: [PATCH 11/27] Removing non streaming part --- .../gitlab/llm/chain/agents/single_action_executor.rb | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index aa25d0f870a797..96c54efc3975e3 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -116,17 +116,6 @@ def prompt_options def picked_tool_action(tool_class) logger.info(message: "Picked tool", tool: tool_class.to_s) - response_handler.execute( - response: Gitlab::Llm::Chain::ToolResponseModifier.new(tool_class), - options: { role: ::Gitlab::Llm::AiMessage::ROLE_SYSTEM, - type: RESPONSE_TYPE_TOOL } - ) - - # We need to stream the response for clients that already migrated to use `ai_action` and no longer - # use `resource_id` as an identifier. Once streaming is enabled and all clients migrated, we can - # remove the `response_handler` call above. 
- return unless stream_response_handler - stream_response_handler.execute( response: Gitlab::Llm::Chain::ToolResponseModifier.new(tool_class), options: { -- GitLab From 042eb0dd5b3a6665ee42eacdc2300928788e41aa Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 19 Jun 2024 16:42:00 +0200 Subject: [PATCH 12/27] Update tests for new agent --- .../lib/gitlab/llm/completions/chat_spec.rb | 61 ++++++++++++++++--- 1 file changed, 53 insertions(+), 8 deletions(-) diff --git a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb index 00e649ccb50007..6afd1fe5569ed7 100644 --- a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb +++ b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb @@ -71,7 +71,7 @@ subject { described_class.new(prompt_message, nil, **options).execute } shared_examples 'success' do - xit 'calls the ZeroShot Agent with the right parameters', :snowplow do + xit 'calls the SingleAction Agent with the right parameters', :snowplow do expected_params = [ user_input: content, tools: match_array(tools), @@ -80,7 +80,7 @@ stream_response_handler: stream_response_handler ] - expect_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor, *expected_params) do |instance| + expect_next_instance_of(::Gitlab::Llm::Chain::Agents::SingleActionExecutor, *expected_params) do |instance| expect(instance).to receive(:execute).and_return(answer) end @@ -126,7 +126,7 @@ stream_response_handler: stream_response_handler ] - expect_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor, *expected_params) do |instance| + expect_next_instance_of(::Gitlab::Llm::Chain::Agents::SingleActionExecutor, *expected_params) do |instance| expect(instance).to receive(:execute).and_return(answer) end @@ -157,7 +157,7 @@ end xit 'sends process_gitlab_duo_question snowplow event with value eql 0' do - allow_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor) do |instance| + 
allow_next_instance_of(::Gitlab::Llm::Chain::Agents::SingleActionExecutor) do |instance| expect(instance).to receive(:execute).and_return(answer) end @@ -218,7 +218,7 @@ stream_response_handler: stream_response_handler ] - expect_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor, *expected_params) do |instance| + expect_next_instance_of(::Gitlab::Llm::Chain::Agents::SingleActionExecutor, *expected_params) do |instance| expect(instance).to receive(:execute).and_return(answer) end expect(response_handler).to receive(:execute) @@ -254,7 +254,7 @@ command: an_instance_of(::Gitlab::Llm::Chain::SlashCommand) } - expect(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor).not_to receive(:new) + expect(::Gitlab::Llm::Chain::Agents::SingleActionExecutor).not_to receive(:new) expect(expected_tool) .to receive(:new).with(expected_params).and_return(executor) @@ -308,7 +308,7 @@ let(:command) { '/explain2' } it 'process the message with zero shot agent' do - expect_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor) do |instance| + expect_next_instance_of(::Gitlab::Llm::Chain::Agents::SingleActionExecutor) do |instance| expect(instance).to receive(:execute).and_return(answer) end expect(::Gitlab::Llm::Chain::Tools::ExplainCode::Executor).not_to receive(:new) @@ -332,7 +332,7 @@ stream_response_handler: stream_response_handler ] - allow_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor, *expected_params) do |instance| + allow_next_instance_of(::Gitlab::Llm::Chain::Agents::SingleActionExecutor, *expected_params) do |instance| allow(instance).to receive(:execute).and_return(answer) end @@ -350,5 +350,50 @@ subject end end + + context 'with disabled v2_chat_agent_integration flag' do + before do + stub_feature_flags(v2_chat_agent_integration: false) + end + + it 'calls the ZeroShot Agent with the right parameters', :snowplow do + expected_params = [ + user_input: content, + tools: match_array(tools), + context: context, + 
response_handler: response_handler, + stream_response_handler: stream_response_handler + ] + + expect_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor, *expected_params) do |instance| + expect(instance).to receive(:execute).and_return(answer) + end + + expect(response_handler).to receive(:execute) + expect(::Gitlab::Llm::ResponseService).to receive(:new).with(context, { request_id: 'uuid', ai_action: :chat }) + .and_return(response_handler) + expect(::Gitlab::Llm::Chain::GitlabContext).to receive(:new) + .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request, + extra_resource: extra_resource, request_id: 'uuid', current_file: current_file, + agent_version: agent_version) + .and_return(context) + expect(categorize_service).to receive(:execute) + expect(::Llm::ExecuteMethodService).to receive(:new) + .with(user, user, :categorize_question, categorize_service_params) + .and_return(categorize_service) + + subject + + expect_snowplow_event( + category: described_class.to_s, + label: "IssueReader", + action: 'process_gitlab_duo_question', + property: 'uuid', + namespace: container, + user: user, + value: 1 + ) + end + end end end -- GitLab From 5ead5e3da709c0c8747b664e14798abe442dd457 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 19 Jun 2024 17:05:20 +0200 Subject: [PATCH 13/27] Add case for reaching max iterations --- .../agents/single_action_executor_spec.rb | 27 ++++++++++++++++--- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb index 26400289a14224..587ff3b56c6589 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb @@ -4,19 +4,21 @@ RSpec.describe Gitlab::Llm::Chain::Agents::SingleActionExecutor, feature_category: :duo_chat do describe "#execute" 
do - subject(:answer) do + subject(:answer) { agent.execute } + + let(:agent) do described_class.new( user_input: user_input, tools: tools, context: context, response_handler: response_service_double, stream_response_handler: stream_response_service_double - ).execute + ) end let_it_be(:user) { build_stubbed(:user) } - let(:user_input) { 'What is the title of this issue?' } + let(:user_input) { 'question?' } let(:tools) { [Gitlab::Llm::Chain::Tools::IssueReader] } let(:tool_double) { instance_double(Gitlab::Llm::Chain::Tools::IssueReader::Executor) } let(:response_service_double) { instance_double(::Gitlab::Llm::ResponseService) } @@ -62,11 +64,28 @@ end it "returns tool answer" do - expect(response_service_double).to receive(:execute) expect(stream_response_service_double).to receive(:execute) expect(answer.is_final?).to be_truthy expect(answer.content).to include("tool answer") end end + + context "when max iteration reached" do + let(:llm_answer) { create(:answer, :tool, tool: Gitlab::Llm::Chain::Tools::IssueReader::Executor) } + + before do + stub_const("#{described_class.name}::MAX_ITERATIONS", 2) + allow(stream_response_service_double).to receive(:execute) + + allow_next_instance_of(Gitlab::Llm::Chain::Tools::IssueReader::Executor) do |issue_tool| + allow(issue_tool).to receive(:execute).and_return(llm_answer) + end + end + + it "returns default answer" do + expect(answer.is_final?).to eq(true) + expect(answer.content).to include(Gitlab::Llm::Chain::Answer.default_final_message) + end + end end end -- GitLab From 4279039fba2218924c6221df3541798e3b0a266a Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 19 Jun 2024 17:16:42 +0200 Subject: [PATCH 14/27] Small clean up --- ee/lib/gitlab/llm/chain/agents/single_action_executor.rb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 96c54efc3975e3..b5e83110fa463d 
100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -142,9 +142,9 @@ def conversation # include only messages with successful response and reorder # messages so each question is followed by its answer by_request = last_conversation - .reject { |message| message.errors.present? } - .group_by(&:request_id) - .select { |_uuid, messages| messages.size > 1 } + .reject { |message| message.errors.present? } + .group_by(&:request_id) + .select { |_uuid, messages| messages.size > 1 } c = by_request.values.sort_by { |messages| messages.first.timestamp }.flatten -- GitLab From 355e1f13dbbe0ccc87998800c259aa51415d8917 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 19 Jun 2024 17:20:35 +0200 Subject: [PATCH 15/27] Update error codes --- ee/lib/gitlab/llm/chain/agents/single_action_executor.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index b5e83110fa463d..771dc64051f8fe 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -76,13 +76,13 @@ def execute Answer.error_answer( context: context, content: _("I'm sorry, I couldn't respond in time. 
Please try again."), - error_code: 'A1000' + error_code: "A1000" ) rescue Gitlab::Llm::AiGateway::Client::ConnectionError => error Gitlab::ErrorTracking.track_exception(error) Answer.error_answer( context: context, - content: _("GitLab Duo could not connect to the AI provider.") + error_code: "A1001" ) end traceable :execute, name: 'Run ReAct' -- GitLab From 2f0fafe3ba2ab36b020615275757660b8683ad9a Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Thu, 20 Jun 2024 15:21:29 +0200 Subject: [PATCH 16/27] Add context to sent params Pass content type and short description --- ee/app/models/ai/ai_resource/epic.rb | 4 ++++ ee/app/models/ai/ai_resource/issue.rb | 4 ++++ .../chain/agents/single_action_executor.rb | 19 ++++++++++++++++--- ee/lib/gitlab/llm/chain/gitlab_context.rb | 2 ++ .../gitlab/llm/chain/requests/ai_gateway.rb | 4 ++-- .../gitlab/llm/chain/gitlab_context_spec.rb | 8 ++++++++ .../llm/chain/requests/ai_gateway_spec.rb | 4 +++- 7 files changed, 39 insertions(+), 6 deletions(-) diff --git a/ee/app/models/ai/ai_resource/epic.rb b/ee/app/models/ai/ai_resource/epic.rb index dd5ab3b7b08e0e..89153e894931bf 100644 --- a/ee/app/models/ai/ai_resource/epic.rb +++ b/ee/app/models/ai/ai_resource/epic.rb @@ -15,6 +15,10 @@ def serialize_for_ai(user:, content_limit:) }) end + def current_page_type + "epic" + end + def current_page_sentence <<~SENTENCE The user is currently on a page that displays an epic with a description, comments, etc., which the user might refer to, for example, as 'current', 'this' or 'that'. The data is provided in tags, and if it is sufficient in answering the question, utilize it instead of using the 'EpicReader' tool. 
diff --git a/ee/app/models/ai/ai_resource/issue.rb b/ee/app/models/ai/ai_resource/issue.rb index f91380d5635ee2..0c653175ffb2f5 100644 --- a/ee/app/models/ai/ai_resource/issue.rb +++ b/ee/app/models/ai/ai_resource/issue.rb @@ -15,6 +15,10 @@ def serialize_for_ai(user:, content_limit:) }) end + def current_page_type + "issue" + end + def current_page_sentence <<~SENTENCE The user is currently on a page that displays an issue with a description, comments, etc., which the user might refer to, for example, as 'current', 'this' or 'that'. The data is provided in tags, and if it is sufficient in answering the question, utilize it instead of using the 'IssueReader' tool. diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 771dc64051f8fe..e3b6e96063a705 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -108,7 +108,8 @@ def prompt_options @options ||= { agent_scratchpad: @agent_scratchpad, conversation: conversation, - current_resource: current_resource, + current_resource_type: current_resource_type, + current_resource_content: current_resource_content, single_action_agent: true } end @@ -157,11 +158,23 @@ def conversation c.to_s end - def current_resource - context.current_page_short_description + def current_resource_type + context.current_page_type + end + + def current_resource_content + if use_experimental_prompt? + context.current_page_experimental_short_description + else + context.current_page_short_description + end rescue ArgumentError "" end + + def use_experimental_prompt? 
+ Feature.enabled?(:prevent_issue_epic_search, context.current_user) + end end end end diff --git a/ee/lib/gitlab/llm/chain/gitlab_context.rb b/ee/lib/gitlab/llm/chain/gitlab_context.rb index e5970f0e81efb9..a5ac2aeccd23aa 100644 --- a/ee/lib/gitlab/llm/chain/gitlab_context.rb +++ b/ee/lib/gitlab/llm/chain/gitlab_context.rb @@ -7,6 +7,8 @@ class GitlabContext attr_accessor :current_user, :container, :resource, :ai_request, :tools_used, :extra_resource, :request_id, :current_file, :agent_version + delegate :current_page_type, to: :authorized_resource + def initialize( current_user:, container:, resource:, ai_request:, extra_resource: {}, request_id: nil, current_file: {}, agent_version: nil diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb index cc817da904a5ec..df28429e0ee439 100644 --- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb +++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb @@ -141,8 +141,8 @@ def request_body_chat_2(prompt:, options: {}) steps: options[:agent_scratchpad] }, context: { - type: "issue", - content: "string" + type: options[:current_resource_type], + content: options[:current_resource_content] } } } diff --git a/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb b/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb index 349acab2fbd4b6..0100995a783593 100644 --- a/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb @@ -24,6 +24,14 @@ group.namespace_settings.update!(experiment_features_enabled: true) end + describe '#current_page_type' do + let(:resource) { create(:issue, project: project) } + + it 'delegates to ai resource' do + expect(context.current_page_type).to eq("issue") + end + end + describe '#resource_serialized' do let(:content_limit) { 500 } diff --git a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb index 8a2492733ef48e..cb75c926190a48 100644 
--- a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb @@ -174,7 +174,9 @@ let(:options) do { agent_scratchpad: [], - single_action_agent: true + single_action_agent: true, + current_resource_type: "issue", + current_resource_content: "string" } end -- GitLab From 1c7a8b123975d3650a6a3c6a604f7a772173eb0c Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Thu, 20 Jun 2024 15:24:57 +0200 Subject: [PATCH 17/27] Update time of the introduction of this feature --- ee/config/feature_flags/wip/v2_chat_agent_integration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/config/feature_flags/wip/v2_chat_agent_integration.yml b/ee/config/feature_flags/wip/v2_chat_agent_integration.yml index 0d57d2d202cfbd..3d2c99a82ea569 100644 --- a/ee/config/feature_flags/wip/v2_chat_agent_integration.yml +++ b/ee/config/feature_flags/wip/v2_chat_agent_integration.yml @@ -3,7 +3,7 @@ name: v2_chat_agent_integration feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/456258 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150529 rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/466910 -milestone: '16.11' +milestone: '17.2' group: group::duo chat type: wip default_enabled: false -- GitLab From 4f5e4dd36d57910f15730c0b1cd023c8cabeb9e0 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Tue, 25 Jun 2024 15:09:34 +0200 Subject: [PATCH 18/27] Handle case when resource isn't authorized --- .../llm/chain/agents/single_action_executor.rb | 4 +++- ee/lib/gitlab/llm/chain/gitlab_context.rb | 12 +++--------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index e3b6e96063a705..c74bbd9df7869f 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ 
-158,8 +158,10 @@ def conversation c.to_s end + # TODO: remove issue condition when next issue is implemented + # https://gitlab.com/gitlab-org/gitlab/-/issues/468905 def current_resource_type - context.current_page_type + context.current_page_type || "issue" end def current_resource_content diff --git a/ee/lib/gitlab/llm/chain/gitlab_context.rb b/ee/lib/gitlab/llm/chain/gitlab_context.rb index a5ac2aeccd23aa..c57862a4253255 100644 --- a/ee/lib/gitlab/llm/chain/gitlab_context.rb +++ b/ee/lib/gitlab/llm/chain/gitlab_context.rb @@ -7,7 +7,9 @@ class GitlabContext attr_accessor :current_user, :container, :resource, :ai_request, :tools_used, :extra_resource, :request_id, :current_file, :agent_version - delegate :current_page_type, to: :authorized_resource + delegate :current_page_type, :current_page_sentence, :current_page_short_description, + :current_page_experimental_short_description, + to: :authorized_resource, allow_nil: true def initialize( current_user:, container:, resource:, ai_request:, extra_resource: {}, request_id: nil, @@ -24,14 +26,6 @@ def initialize( @agent_version = agent_version end - def current_page_sentence - authorized_resource&.current_page_sentence - end - - def current_page_short_description - authorized_resource&.current_page_short_description - end - def resource_serialized(content_limit:) return '' unless authorized_resource -- GitLab From d08efeaf10a90c83a4ae30c20fc0294b8e742008 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Tue, 25 Jun 2024 17:00:45 +0200 Subject: [PATCH 19/27] Fix feature test --- ee/spec/features/duo_chat_spec.rb | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/ee/spec/features/duo_chat_spec.rb b/ee/spec/features/duo_chat_spec.rb index 24ae5a84aabc77..59bd023a57e4c4 100644 --- a/ee/spec/features/duo_chat_spec.rb +++ b/ee/spec/features/duo_chat_spec.rb @@ -34,13 +34,8 @@ let(:chat_response) { "Final Answer: #{answer}" } before do - # TODO: Switch to AI Gateway - # See 
https://gitlab.com/gitlab-org/gitlab/-/issues/431563 - stub_request(:post, "https://api.anthropic.com/v1/complete") - .to_return( - status: 200, body: { completion: "question_category" }.to_json, - headers: { 'Content-Type' => 'application/json' } - ) + # TODO: remove with https://gitlab.com/gitlab-org/gitlab/-/issues/456258 + stub_feature_flags(v2_chat_agent_integration: false) stub_request(:post, "#{Gitlab::AiGateway.url}/v1/chat/agent") .with(body: hash_including({ "stream" => true })) -- GitLab From d943abb345050280089a8da325894c3b3297e8cb Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 26 Jun 2024 14:20:01 +0200 Subject: [PATCH 20/27] Fix merge conflicts --- .../gitlab/llm/chain/agents/single_action_executor.rb | 10 +--------- ee/lib/gitlab/llm/chain/gitlab_context.rb | 1 - 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index c74bbd9df7869f..08426c5b09eb0e 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -165,18 +165,10 @@ def current_resource_type end def current_resource_content - if use_experimental_prompt? - context.current_page_experimental_short_description - else - context.current_page_short_description - end + context.current_page_short_description rescue ArgumentError "" end - - def use_experimental_prompt? 
- Feature.enabled?(:prevent_issue_epic_search, context.current_user) - end end end end diff --git a/ee/lib/gitlab/llm/chain/gitlab_context.rb b/ee/lib/gitlab/llm/chain/gitlab_context.rb index c57862a4253255..9a9cfb94da94de 100644 --- a/ee/lib/gitlab/llm/chain/gitlab_context.rb +++ b/ee/lib/gitlab/llm/chain/gitlab_context.rb @@ -8,7 +8,6 @@ class GitlabContext :current_file, :agent_version delegate :current_page_type, :current_page_sentence, :current_page_short_description, - :current_page_experimental_short_description, to: :authorized_resource, allow_nil: true def initialize( -- GitLab From b494b96a8b040f833fff709f6181b8c7b83cb24d Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Wed, 26 Jun 2024 17:47:49 +0200 Subject: [PATCH 21/27] Temporary test fixes --- ee/lib/gitlab/llm/chain/agents/single_action_executor.rb | 2 ++ ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb | 3 +++ .../gitlab/llm/chain/agents/zero_shot/qa_evaluation_spec.rb | 1 + 3 files changed, 6 insertions(+) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 08426c5b09eb0e..0883ad4f0dbd92 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -162,6 +162,8 @@ def conversation # https://gitlab.com/gitlab-org/gitlab/-/issues/468905 def current_resource_type context.current_page_type || "issue" + rescue ArgumentError + "" end def current_resource_content diff --git a/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb b/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb index cca37eec78e919..a1a707dded293d 100644 --- a/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb @@ -1,5 +1,8 @@ # frozen_string_literal: true +# Deprecation: this executor will be removed in favor of SingleActionExecutor +# see https://gitlab.com/gitlab-org/gitlab/-/issues/469087 + module Gitlab 
module Llm module Chain diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/qa_evaluation_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/qa_evaluation_spec.rb index e2b017ccc23566..387f9c0ebde5fd 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/qa_evaluation_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/qa_evaluation_spec.rb @@ -14,6 +14,7 @@ let_it_be(:issue_fixtures) { load_fixture('issues') } before_all do + stub_feature_flags(v2_chat_agent_integration: false) # link_reference_pattern is memoized for Issue # and stubbed url (gitlab.com) is not used to derive the link reference pattern. Issue.instance_variable_set(:@link_reference_pattern, nil) -- GitLab From 658f006ee60af33ba2350b64161f79e69f808260 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Thu, 27 Jun 2024 13:00:32 +0200 Subject: [PATCH 22/27] Fix merge conflicts --- ee/lib/gitlab/llm/chain/requests/ai_gateway.rb | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb index df28429e0ee439..3035087ffc2d91 100644 --- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb +++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb @@ -42,14 +42,8 @@ def request(prompt, unit_primitive: nil) request_body(prompt: prompt[:prompt], options: options) end - endpoint = if v2_chat_schema - CHAT_V2_ENDPOINT - else - ENDPOINT - end - response = ai_client.stream( - endpoint: endpoint(unit_primitive), + endpoint: endpoint(unit_primitive, v2_chat_schema), body: body ) do |data| yield data if block_given? @@ -89,9 +83,11 @@ def model_provider_valid?(options) provider(options) end - def endpoint(unit_primitive) + def endpoint(unit_primitive, v2_chat_schema) if unit_primitive.present? 
"#{BASE_ENDPOINT}/#{unit_primitive}" + elsif v2_chat_schema + CHAT_V2_ENDPOINT else ENDPOINT end -- GitLab From 15824300699cecc6300a4b50b99b4c987b4eb511 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Thu, 27 Jun 2024 15:06:08 +0200 Subject: [PATCH 23/27] Temporary skip test --- ee/spec/lib/gitlab/llm/completions/chat_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb index 6afd1fe5569ed7..6aab877abf12d4 100644 --- a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb +++ b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb @@ -356,7 +356,7 @@ stub_feature_flags(v2_chat_agent_integration: false) end - it 'calls the ZeroShot Agent with the right parameters', :snowplow do + xit 'calls the ZeroShot Agent with the right parameters', :snowplow do expected_params = [ user_input: content, tools: match_array(tools), -- GitLab From 309fb04a258e7c0ee7648f059834651a3404fe03 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Fri, 28 Jun 2024 11:18:12 +0200 Subject: [PATCH 24/27] Add missing tests Remove unused method and add missed tests for blind spots found by the undercoverage --- .../chain/agents/single_action_executor.rb | 7 -- .../agents/single_action_executor_spec.rb | 101 ++++++++++++++++-- ee/spec/models/ai/ai_resource/epic_spec.rb | 6 ++ ee/spec/models/ai/ai_resource/issue_spec.rb | 6 ++ 4 files changed, 106 insertions(+), 14 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 0883ad4f0dbd92..ca27b2c0d50581 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -126,13 +126,6 @@ def picked_tool_action(tool_class) ) end - def available_resources_names - tools.filter_map do |tool_class| - tool_class::Executor::RESOURCE_NAME.pluralize if 
tool_class::Executor::RESOURCE_NAME.present? - end.join(', ') - end - strong_memoize_attr :available_resources_names - # agent_version is deprecated, Chat conversation doesn't have this param anymore def last_conversation ChatStorage.new(context.current_user, nil).last_conversation diff --git a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb index 587ff3b56c6589..18e17d588bdff6 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb @@ -34,22 +34,49 @@ ) end - let(:response_double) do - "{\"type\":\"final_answer_delta\",\"data\":{\"thought\":\"Thought: direct answer.\",\"text\":\"To\"}}" + let(:answer_chunk) do + "{\"type\":\"final_answer_delta\",\"data\":{\"thought\":\"Thought: direct answer.\",\"text\":\"Ans\"}}" end before do allow(context).to receive(:ai_request).and_return(ai_request_double) - allow(ai_request_double).to receive(:request).and_return(response_double) - allow(::Gitlab::Llm::Chain::Answer).to receive(:from_response).and_return(llm_answer) + allow(ai_request_double).to receive(:request).and_return(answer_chunk) end context "when answer is final" do - let(:llm_answer) { create(:answer, :final, context: context) } + let(:another_chunk) do + "{\"type\":\"final_answer_delta\",\"data\":{\"thought\":\"\",\"text\":\"wer\"}}" + end + + let(:response_double) do + "#{answer_chunk}\n#{another_chunk}" + end + + let(:first_response_double) { double } + let(:second_response_double) { double } + + before do + allow(ai_request_double).to receive(:request).and_yield(answer_chunk) + .and_yield(another_chunk) + .and_return(response_double) + allow(Gitlab::Llm::Chain::StreamedResponseModifier).to receive(:new).with("Ans", { chunk_id: 1 }) + .and_return(first_response_double) + allow(Gitlab::Llm::Chain::StreamedResponseModifier).to receive(:new).with("wer", { chunk_id: 2 }) + 
.and_return(second_response_double) + end + + it "streams final answer" do + expect(stream_response_service_double).to receive(:execute).with( + response: first_response_double, + options: { chunk_id: 1 } + ) + expect(stream_response_service_double).to receive(:execute).with( + response: second_response_double, + options: { chunk_id: 2 } + ) - it "returns final answer from LLM" do expect(answer.is_final?).to be_truthy - expect(answer.content).to include("content") + expect(answer.content).to include("Answer") end end @@ -58,6 +85,8 @@ let(:tool_answer) { create(:answer, :final, content: 'tool answer') } before do + allow(::Gitlab::Llm::Chain::Answer).to receive(:from_response).and_return(llm_answer) + allow_next_instance_of(Gitlab::Llm::Chain::Tools::IssueReader::Executor) do |issue_tool| allow(issue_tool).to receive(:execute).and_return(tool_answer) end @@ -76,6 +105,7 @@ before do stub_const("#{described_class.name}::MAX_ITERATIONS", 2) allow(stream_response_service_double).to receive(:execute) + allow(::Gitlab::Llm::Chain::Answer).to receive(:from_response).and_return(llm_answer) allow_next_instance_of(Gitlab::Llm::Chain::Tools::IssueReader::Executor) do |issue_tool| allow(issue_tool).to receive(:execute).and_return(llm_answer) @@ -87,5 +117,62 @@ expect(answer.content).to include(Gitlab::Llm::Chain::Answer.default_final_message) end end + + context "when times out error is raised" do + let(:error) { Net::ReadTimeout.new } + + before do + allow(Gitlab::ErrorTracking).to receive(:track_exception) + end + + shared_examples "time out error" do + it "returns an error" do + expect(answer.is_final?).to eq(true) + expect(answer.content).to include("I'm sorry, I couldn't respond in time. 
Please try again.") + expect(answer.error_code).to include("A1000") + expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(error) + end + end + + context "when streamed request times out" do + before do + allow(ai_request_double).to receive(:request).and_raise(error) + end + + it_behaves_like "time out error" + end + + context "when tool times out" do + let(:llm_answer) { create(:answer, :tool, tool: Gitlab::Llm::Chain::Tools::IssueReader::Executor) } + + before do + allow(ai_request_double).to receive(:request) + allow(::Gitlab::Llm::Chain::Answer).to receive(:from_response).and_return(llm_answer) + allow_next_instance_of(Gitlab::Llm::Chain::Tools::IssueReader::Executor) do |issue_tool| + allow(issue_tool).to receive(:execute).and_raise(error) + end + + allow(stream_response_service_double).to receive(:execute) + end + + it_behaves_like "time out error" + end + end + + context "when connection error is raised" do + let(:error) { ::Gitlab::Llm::AiGateway::Client::ConnectionError.new } + + before do + allow(Gitlab::ErrorTracking).to receive(:track_exception) + allow(ai_request_double).to receive(:request).and_raise(error) + end + + it "returns an error" do + expect(answer.is_final).to eq(true) + expect(answer.content).to include("I'm sorry, I can't generate a response. 
Please try again.") + expect(answer.error_code).to include("A1001") + expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(error) + end + end end end diff --git a/ee/spec/models/ai/ai_resource/epic_spec.rb b/ee/spec/models/ai/ai_resource/epic_spec.rb index 4113331e6e16bb..fa85be72bbd6df 100644 --- a/ee/spec/models/ai/ai_resource/epic_spec.rb +++ b/ee/spec/models/ai/ai_resource/epic_spec.rb @@ -36,4 +36,10 @@ .not_to include("utilize it instead of using the 'EpicReader' tool") end end + + describe '#current_page_type' do + it 'returns type' do + expect(wrapped_epic.current_page_type).to eq('epic') + end + end end diff --git a/ee/spec/models/ai/ai_resource/issue_spec.rb b/ee/spec/models/ai/ai_resource/issue_spec.rb index e4f6947c1ec237..91e653a9d1f3ea 100644 --- a/ee/spec/models/ai/ai_resource/issue_spec.rb +++ b/ee/spec/models/ai/ai_resource/issue_spec.rb @@ -35,4 +35,10 @@ .not_to include("utilize it instead of using the 'IssueReader' tool") end end + + describe '#current_page_type' do + it 'returns type' do + expect(wrapped_issue.current_page_type).to eq('issue') + end + end end -- GitLab From 24ba5a51d822f92a1a18cf8ed24b09a6e8c50f81 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Fri, 28 Jun 2024 16:02:48 +0200 Subject: [PATCH 25/27] Allow nullify the context resource --- .../chain/agents/single_action_executor.rb | 20 ++++++--- .../agents/single_action_executor_spec.rb | 42 ++++++++++++++++++- 2 files changed, 54 insertions(+), 8 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index ca27b2c0d50581..6cea26509d6489 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -105,14 +105,22 @@ def prompt end def prompt_options - @options ||= { + @options = { agent_scratchpad: @agent_scratchpad, conversation: conversation, - current_resource_type: current_resource_type, - 
current_resource_content: current_resource_content, single_action_agent: true } + + if current_resource_type + @options.merge!( + current_resource_type: current_resource_type, + current_resource_content: current_resource_content + ) + end + + @options end + strong_memoize_attr :prompt_options def picked_tool_action(tool_class) logger.info(message: "Picked tool", tool: tool_class.to_s) @@ -154,15 +162,15 @@ def conversation # TODO: remove issue condition when next issue is implemented # https://gitlab.com/gitlab-org/gitlab/-/issues/468905 def current_resource_type - context.current_page_type || "issue" + context.current_page_type rescue ArgumentError - "" + nil end def current_resource_content context.current_page_short_description rescue ArgumentError - "" + nil end end end diff --git a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb index 18e17d588bdff6..750ade81768e6d 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb @@ -16,7 +16,9 @@ ) end - let_it_be(:user) { build_stubbed(:user) } + let_it_be(:issue) { build_stubbed(:issue) } + let_it_be(:resource) { issue } + let_it_be(:user) { issue.author } let(:user_input) { 'question?' 
} + let(:tools) { [Gitlab::Llm::Chain::Tools::IssueReader] } @@ -25,7 +27,6 @@ let(:stream_response_service_double) { instance_double(::Gitlab::Llm::ResponseService) } let(:ai_request_double) { instance_double(Gitlab::Llm::Chain::Requests::AiGateway) } - let(:resource) { create(:issue) } let(:context) do Gitlab::Llm::Chain::GitlabContext.new( @@ -63,6 +64,9 @@ .and_return(first_response_double) allow(Gitlab::Llm::Chain::StreamedResponseModifier).to receive(:new).with("wer", { chunk_id: 2 }) .and_return(second_response_double) + + allow(context).to receive(:current_page_type).and_return("issue") + allow(context).to receive(:current_page_short_description).and_return("issue description") end it "streams final answer" do @@ -75,6 +79,20 @@ options: { chunk_id: 2 } ) + expect(ai_request_double).to receive(:request).with( + { + prompt: user_input, + options: { + agent_scratchpad: [], + conversation: [], + single_action_agent: true, + current_resource_type: "issue", + current_resource_content: "issue description" + } + }, + { unit_primitive: nil } + ) + expect(answer.is_final?).to be_truthy expect(answer.content).to include("Answer") end @@ -118,6 +136,26 @@ end end + context "when resource is not authorized" do + let(:resource) { user } + + it "sends request without context" do + expect(ai_request_double).to receive(:request).with( + { + prompt: user_input, + options: { + agent_scratchpad: [], + conversation: [], + single_action_agent: true + } + }, + { unit_primitive: nil } + ) + + agent.execute + end + end + context "when times out error is raised" do let(:error) { Net::ReadTimeout.new } -- GitLab From c2f22e1928fda48fec8dde473effdfb17aa6504b Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Fri, 28 Jun 2024 17:12:31 +0200 Subject: [PATCH 26/27] Nullify context in request --- .../gitlab/llm/chain/requests/ai_gateway.rb | 27 ++++++++++-------- .../llm/chain/requests/ai_gateway_spec.rb | 28 +++++++++++++++++++ 2 files changed, 44 insertions(+), 11 deletions(-) diff 
--git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb index 3035087ffc2d91..b64e5f336d64b9 100644 --- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb +++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb @@ -128,19 +128,24 @@ def model_params(options) end def request_body_chat_2(prompt:, options: {}) + option_params = { + chat_history: "", + agent_scratchpad: { + agent_type: "react", + steps: options[:agent_scratchpad] + } + } + + if options[:current_resource_type] + option_params[:context] = { + type: options[:current_resource_type], + content: options[:current_resource_content] + } + end + { prompt: prompt, - options: { - chat_history: "", - agent_scratchpad: { - agent_type: "react", - steps: options[:agent_scratchpad] - }, - context: { - type: options[:current_resource_type], - content: options[:current_resource_content] - } - } + options: option_params } end diff --git a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb index cb75c926190a48..c414e1d75c179a 100644 --- a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb @@ -199,5 +199,33 @@ it_behaves_like 'performing request to the AI Gateway' end + + context 'when request is sent for a new ReAct Duo Chat prompt without context params' do + let(:endpoint) { described_class::CHAT_V2_ENDPOINT } + + let(:prompt) { { prompt: user_prompt, options: options } } + + let(:options) do + { + agent_scratchpad: [], + single_action_agent: true + } + end + + let(:body) do + { + prompt: user_prompt, + options: { + chat_history: "", + agent_scratchpad: { + agent_type: "react", + steps: [] + } + } + } + end + + it_behaves_like 'performing request to the AI Gateway' + end end end -- GitLab From fb188a5deb3bd4c41d8dfe49d983dab6f5032636 Mon Sep 17 00:00:00 2001 From: Tetiana Chupryna Date: Sun, 30 Jun 2024 14:17:29 +0200 Subject: [PATCH 27/27] Pass 
all prompt params --- .../llm/chain/agents/single_action_executor.rb | 13 +++---------- .../llm/chain/agents/single_action_executor_spec.rb | 4 +++- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb index 6cea26509d6489..eb4e0b1a8d84a8 100644 --- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb @@ -108,19 +108,11 @@ def prompt_options @options = { agent_scratchpad: @agent_scratchpad, conversation: conversation, + current_resource_type: current_resource_type, + current_resource_content: current_resource_content, single_action_agent: true } - - if current_resource_type - @options.merge!( - current_resource_type: current_resource_type, - current_resource_content: current_resource_content - ) - end - - @options end - strong_memoize_attr :prompt_options def picked_tool_action(tool_class) logger.info(message: "Picked tool", tool: tool_class.to_s) @@ -158,6 +150,7 @@ def conversation c.to_s end + strong_memoize_attr :conversation # TODO: remove issue condition when next issue is implemented # https://gitlab.com/gitlab-org/gitlab/-/issues/468905 diff --git a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb index 750ade81768e6d..1a6dbcd834c68b 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb @@ -146,7 +146,9 @@ options: { agent_scratchpad: [], conversation: [], - single_action_agent: true + single_action_agent: true, + current_resource_type: nil, + current_resource_content: nil } }, { unit_primitive: nil } -- GitLab