diff --git a/config/feature_flags/development/explain_current_blob.yml b/config/feature_flags/development/explain_current_blob.yml new file mode 100644 index 0000000000000000000000000000000000000000..e296748a3f79fb40ee574bf406e5dd3c593d0084 --- /dev/null +++ b/config/feature_flags/development/explain_current_blob.yml @@ -0,0 +1,8 @@ +--- +name: explain_current_blob +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128342/ +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/420959 +milestone: '16.3' +type: development +group: group::ai framework +default_enabled: false diff --git a/config/feature_flags/development/push_ai_to_load_identified_issue_json.yml b/config/feature_flags/development/push_ai_to_load_identified_issue_json.yml new file mode 100644 index 0000000000000000000000000000000000000000..265b561c7607a52ddd1afa54a7027df73e7ae957 --- /dev/null +++ b/config/feature_flags/development/push_ai_to_load_identified_issue_json.yml @@ -0,0 +1,8 @@ +--- +name: push_ai_to_load_identified_issue_json +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128342 +rollout_issue_url: +milestone: '16.3' +type: development +group: group::ai framework +default_enabled: false diff --git a/ee/app/finders/llm/extra_resource_finder.rb b/ee/app/finders/llm/extra_resource_finder.rb new file mode 100644 index 0000000000000000000000000000000000000000..c27a3b484205e9d69b61d1e9405b2a6322eb74e4 --- /dev/null +++ b/ee/app/finders/llm/extra_resource_finder.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +module Llm + # ExtraResourceFinder attempts to locate a resource based on `referer_url` + # Presently, the finder only deals with a Blob resource. + # Since the finder does not deal with DB resources, it's been added to spec/support/finder_collection_allowlist.yml. + # As more resource types need to be supported (potentially), appropriate abstractions should be designed and added. 
+  class ExtraResourceFinder
+    # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/422133
+    # The module is meant to be used in controllers.
+    include ::ExtractsRef
+
+    def initialize(current_user, referer_url)
+      @current_user = current_user
+      @referer_url = referer_url
+      @extra_resources = {}
+    end
+
+    def execute
+      find_blob_resource
+
+      @extra_resources
+    end
+
+    private
+
+    def find_blob_resource
+      return unless Feature.enabled?(:explain_current_blob, @current_user) && @referer_url
+
+      project_fullpath, resource_path = parse_referer(@referer_url)
+      return unless project_fullpath && resource_path
+
+      @project = find_project(project_fullpath)
+      return unless @project
+
+      ref, path = extract_blob_ref_and_path(resource_path)
+      return unless ref && path
+
+      blob = find_blob(ref, path)
+
+      @extra_resources[:blob] = blob if blob && blob.readable_text?
+    end
+
+    def find_project(project_fullpath)
+      project = Project.find_by_full_path(project_fullpath)
+      return unless project && @current_user.can?(:read_code, project) && project.repository
+
+      project
+    end
+
+    def find_blob(ref, path)
+      commit = @project.repository.commit(ref)
+      return if commit.nil?
+
+      @project.repository.blob_at(commit.id, path)
+    end
+
+    def parse_referer(referer_url)
+      referer_url.split("#{Gitlab.config.gitlab.base_url}/")[1].try(:split, "/-/", 2)
+    end
+
+    def extract_blob_ref_and_path(resource_path)
+      return unless resource_path.start_with?("blob/")
+
+      resource_path = resource_path
+                        .sub('blob/', '') # Trim `blob/`
+                        .split(%r{\#|\?}, 2) # Extract up to the first occurrence of # or ? (URL anchor/param)
+                        .first.tap { |blob_path| blob_path || "" }
+      return if resource_path.empty?
+ + extract_ref(resource_path) + end + + # Required to use the method `extract_ref` from ExtractsRef + def repository_container + @project + end + end +end diff --git a/ee/app/graphql/mutations/ai/action.rb b/ee/app/graphql/mutations/ai/action.rb index 0c03e933aad1febf46fc6968d2b0d2e32f35e54d..9d4853ed7242219acd8a13d49442e14162dbcb1a 100644 --- a/ee/app/graphql/mutations/ai/action.rb +++ b/ee/app/graphql/mutations/ai/action.rb @@ -36,6 +36,8 @@ def resolve(**attributes) resource_id, method, options = extract_method_params!(attributes) resource = resource_id&.then { |id| authorized_find!(id: id) } + options[:referer_url] = context[:request].headers["Referer"] if method == :chat + response = Llm::ExecuteMethodService.new(current_user, resource, method, options).execute { diff --git a/ee/app/workers/llm/completion_worker.rb b/ee/app/workers/llm/completion_worker.rb index 8e1153dd0f613524ee23601b5762e9aec2823c3d..8e274b78d92616db115f6bf7c3ffb8a9349d189a 100644 --- a/ee/app/workers/llm/completion_worker.rb +++ b/ee/app/workers/llm/completion_worker.rb @@ -28,6 +28,8 @@ def perform(user_id, resource_id, resource_class, ai_action_name, options = {}) resource = find_resource(resource_id, resource_class) return if resource && !user.can?("read_#{resource.to_ability_name}", resource) + options[:extra_resource] = ::Llm::ExtraResourceFinder.new(user, options.delete(:referer_url)).execute + params = options.extract!(:request_id, :internal_request, :cache_response) logger.debug(message: "Params", params: params) ai_completion = ::Gitlab::Llm::CompletionsFactory.completion(ai_action_name.to_sym, params) diff --git a/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb b/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb index 3aef4d892f4ebcd99b12eac1097d9f5f170eeae3..a5bc2c240e85f132265c43e6ec7f29dd40c9e277 100644 --- a/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb +++ b/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb @@ -90,7 +90,9 @@ def options user_input: 
user_input, agent_scratchpad: +"", conversation: conversation, - prompt_version: prompt_version + prompt_version: prompt_version, + current_code: current_code, + explain_current_blob: Feature.enabled?(:explain_current_blob, context.current_user) } end @@ -116,6 +118,15 @@ def conversation by_request.values.sort_by { |messages| messages.first.timestamp }.flatten end + def current_code + return "" unless Feature.enabled?(:explain_current_blob, context.current_user) + + blob = @context.extra_resource[:blob] + return "" unless blob + + "The current code file that user sees is #{blob.path} and has the following content\n#{blob.data}\n\n" + end + PROMPT_TEMPLATE = [ Utils::Prompt.as_system( <<~PROMPT @@ -141,7 +152,7 @@ def conversation Final Answer: the final answer to the original input question. When concluding your response, provide the final answer as "Final Answer:" as soon as the answer is recognized. - + %s If no tool is needed, give a final answer with "Action: DirectAnswer" for the Action parameter and skip writing an Observation. Begin! 
PROMPT diff --git a/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/base.rb b/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/base.rb index d36d7d89724018a30fb2bc290b26f8088d2c4f05..bbd0d61cccfecc0048e3d71ae700b0f0bcfa6c5e 100644 --- a/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/base.rb +++ b/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/base.rb @@ -12,8 +12,10 @@ def self.base_prompt(options) options.fetch(:prompt_version), options ) + explain_current_blob = options[:explain_current_blob] + default_system_prompt = Utils::Prompt.default_system_prompt(explain_current_blob: explain_current_blob) - "#{Utils::Prompt.default_system_prompt}\n\n#{base_prompt}" + "#{default_system_prompt}\n\n#{base_prompt}" end end end diff --git a/ee/lib/gitlab/llm/chain/gitlab_context.rb b/ee/lib/gitlab/llm/chain/gitlab_context.rb index 481e6527ad29e1f2d68230d41c0e3f2ed06bf8ec..dba25a0cae3b6fc1371321c21098107168dd512e 100644 --- a/ee/lib/gitlab/llm/chain/gitlab_context.rb +++ b/ee/lib/gitlab/llm/chain/gitlab_context.rb @@ -4,14 +4,15 @@ module Gitlab module Llm module Chain class GitlabContext - attr_accessor :current_user, :container, :resource, :ai_request, :tools_used + attr_accessor :current_user, :container, :resource, :ai_request, :tools_used, :extra_resource - def initialize(current_user:, container:, resource:, ai_request:, tools_used: []) + def initialize(current_user:, container:, resource:, ai_request:, tools_used: [], extra_resource: {}) @current_user = current_user @container = container @resource = resource @ai_request = ai_request @tools_used = tools_used + @extra_resource = extra_resource end end end diff --git a/ee/lib/gitlab/llm/chain/tools/issue_identifier/executor.rb b/ee/lib/gitlab/llm/chain/tools/issue_identifier/executor.rb index 8eccbc780fcd2f6c7987d071d1d19f7d73256ac3..7a6429dd9cf4fcd3d9f141e3cb28d14fe977a714 100644 --- a/ee/lib/gitlab/llm/chain/tools/issue_identifier/executor.rb +++ b/ee/lib/gitlab/llm/chain/tools/issue_identifier/executor.rb @@ -126,6 
+126,7 @@ def perform # now the issue in context is being referenced in user input. context.resource = issue content = "I identified the issue #{json[:ResourceIdentifier]}." + content += " For more information use ResourceReader." if load_json? logger.debug(message: "Answer", class: self.class.to_s, content: content) return Answer.new(status: :ok, context: context, content: content, tool: nil) @@ -147,6 +148,10 @@ def perform private + def load_json? + Feature.enabled?(:push_ai_to_load_identified_issue_json) + end + def authorize Utils::Authorizer.context_authorized?(context: context) end diff --git a/ee/lib/gitlab/llm/chain/utils/prompt.rb b/ee/lib/gitlab/llm/chain/utils/prompt.rb index 75327d8754dd4b706f1598abe86381202462e199..df0524b0be436091e91ad321b66757c2d4eda0ef 100644 --- a/ee/lib/gitlab/llm/chain/utils/prompt.rb +++ b/ee/lib/gitlab/llm/chain/utils/prompt.rb @@ -33,7 +33,15 @@ def self.role_conversation(prompt_template, input_variables) end.to_json end - def self.default_system_prompt + def self.default_system_prompt(explain_current_blob: false) + # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/420959 + # Remove the conditional along with the feature flag. + explain_code_prompt = if explain_current_blob + "\nYou can explain code if the user provided a code snippet and answer directly." + else + "" + end + <<~PROMPT You are a DevSecOps Assistant named '#{Gitlab::Llm::Chain::Agents::ZeroShot::Executor::AGENT_NAME}' created by GitLab. @@ -49,6 +57,7 @@ def self.default_system_prompt The generated code should be formatted in markdown. If a question cannot be answered with the tools and information given, answer politely that you don’t know. + #{explain_code_prompt} If the question is to write or generate new code you should always answer directly. When no tool matches you should answer the question directly. 
diff --git a/ee/lib/gitlab/llm/completions/chat.rb b/ee/lib/gitlab/llm/completions/chat.rb index 777a0fd4d781ff230343f2372611e94e4fb8cbc9..bb25a23f575d14c0da87c0a7cf7c39cc7617a361 100644 --- a/ee/lib/gitlab/llm/completions/chat.rb +++ b/ee/lib/gitlab/llm/completions/chat.rb @@ -19,7 +19,8 @@ def execute(user, resource, options) current_user: user, container: resource.try(:resource_parent)&.root_ancestor, resource: resource, - ai_request: ai_request + ai_request: ai_request, + extra_resource: options.delete(:extra_resource) || {} ) response = Gitlab::Llm::Chain::Agents::ZeroShot::Executor.new( diff --git a/ee/spec/finders/llm/extra_resource_finder_spec.rb b/ee/spec/finders/llm/extra_resource_finder_spec.rb new file mode 100644 index 0000000000000000000000000000000000000000..58f68012249868f73ad89d1b67d1656aefa32f23 --- /dev/null +++ b/ee/spec/finders/llm/extra_resource_finder_spec.rb @@ -0,0 +1,108 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Llm::ExtraResourceFinder, feature_category: :ai_abstraction_layer do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:other_project) { create(:project, :repository) } + let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } } + let_it_be(:other_developer) { create(:user).tap { |u| other_project.add_developer(u) } } + let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } } + let_it_be(:issue) { create(:issue, project: project) } + + let(:current_user) { developer } + let(:blob_url) { Gitlab::Routing.url_helpers.project_blob_url(project, project.default_branch) } + + describe '.execute' do + subject(:execute) { described_class.new(current_user, referer_url).execute } + + context 'with an invalid or non-resource referer_url' do + where(:referer_url) do + [ + [nil], + [''], + ['foo'], + [Gitlab.config.gitlab.base_url], + [lazy { "#{blob_url}/?" 
}] + ] + end + + with_them do + it 'returns an empty hash' do + expect(execute).to be_empty + end + end + end + + context 'when referer_url references a resource other than Blob' do + let(:referer_url) { ::Gitlab::Routing.url_helpers.project_issue_url(project, issue.id) } + + it 'returns an empty hash' do + expect(execute).to be_empty + end + end + + context 'when referer_url references a Blob' do + let(:referer_url) { "#{blob_url}/#{path}" } + + context 'when referer_url references a valid blob' do + let(:path) { 'files/ruby/popen.rb' } + + context 'when the blob is a readable text' do + let(:expected_blob) { project.repository.blob_at(project.default_branch, path) } + + it 'returns the blob' do + expect(expected_blob).not_to eq(nil) + expect(execute[:blob].id).to eq(expected_blob.id) + end + + context 'when the feature flag :explain_current_blob is disabled for user' do + before do + stub_feature_flags(explain_current_blob: guest) + end + + it 'returns an empty hash' do + expect(execute).to be_empty + end + end + + context "when user is not authorized to read code for the blob's project" do + context 'when user is a guest' do + let(:current_user) { guest } + + it 'returns an empty hash' do + expect(execute).to be_empty + end + end + + context 'when user does not have any access' do + let(:current_user) { other_developer } + + it 'returns an empty hash' do + expect(execute).to be_empty + end + end + end + end + + context 'when the blob is not a readable text' do + let(:non_readable_blob) { project.repository.blob_at(project.default_branch, path) } + let(:path) { 'Gemfile.zip' } + + it 'returns an empty hash' do + expect(non_readable_blob).not_to eq(nil) + expect(execute).to be_empty + end + end + end + + context 'when referer_url references a non-existing blob' do + let(:path) { 'foobar.rb' } + + it 'returns an empty hash' do + expect(execute).to be_empty + end + end + end + end +end diff --git a/ee/spec/graphql/mutations/ai/action_spec.rb 
b/ee/spec/graphql/mutations/ai/action_spec.rb index 29a3bfdda9013c9ead0aa614f6f1bb3f1516b5fa..032f82bceb8222d9d549a45cc3ad3be9bd668ec2 100644 --- a/ee/spec/graphql/mutations/ai/action_spec.rb +++ b/ee/spec/graphql/mutations/ai/action_spec.rb @@ -7,8 +7,11 @@ let_it_be(:resource, reload: true) { create(:issue) } let(:resource_id) { resource.to_gid.to_s } let(:request_id) { 'uuid' } + let(:request) { instance_double(ActionDispatch::Request, headers: { "Referer" => "foobar" }) } + let(:context) { { current_user: user, request: request } } + let(:expected_options) { {} } - subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) } + subject(:mutation) { described_class.new(object: nil, context: context, field: nil) } describe '#ready?' do let(:arguments) { { summarize_comments: { resource_id: resource_id }, markup_format: :markdown } } @@ -127,15 +130,14 @@ end context 'when resource is null' do - let(:input) { { chat: { resource_id: nil } } } - let(:expected_options) { {} } + let(:resource_id) { nil } it 'calls Llm::ExecuteMethodService' do expect_next_instance_of( Llm::ExecuteMethodService, user, nil, - :chat, + expected_method, expected_options ) do |svc| expect(svc) @@ -169,6 +171,18 @@ end end + context 'when chat input is set ' do + let_it_be(:project) { create(:project, :repository).tap { |p| p.add_developer(user) } } + let_it_be(:issue) { create(:issue, project: project) } + + let(:input) { { chat: { resource_id: resource_id } } } + + it_behaves_like 'an AI action' do + let(:expected_method) { :chat } + let(:expected_options) { { referer_url: "foobar" } } + end + end + context 'when summarize_comments input is set' do let(:input) { { summarize_comments: { resource_id: resource_id } } } let(:expected_method) { :summarize_comments } diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb index 
f9016c4391d2130f0902d339bc3392e82112db06..c9f127bb59bb79c7ecefbe703483878ff85140d5 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb @@ -10,12 +10,15 @@ let(:tool_answer) { instance_double(Gitlab::Llm::Chain::Answer, is_final?: false, content: 'Bar', status: :ok) } let(:tool_double) { instance_double(Gitlab::Llm::Chain::Tools::IssueIdentifier::Executor) } let(:tools) { [Gitlab::Llm::Chain::Tools::IssueIdentifier] } + let(:extra_resource) { {} } let(:response_double) { "I know the final answer\nFinal Answer: FooBar" } + let(:resource) { user } let(:context) do Gitlab::Llm::Chain::GitlabContext.new( - current_user: user, container: nil, resource: user, ai_request: ai_request_double, - tools_used: [Gitlab::Llm::Chain::Tools::IssueIdentifier, Gitlab::Llm::Chain::Tools::IssueIdentifier] + current_user: user, container: nil, resource: resource, ai_request: ai_request_double, + tools_used: [Gitlab::Llm::Chain::Tools::IssueIdentifier, Gitlab::Llm::Chain::Tools::IssueIdentifier], + extra_resource: extra_resource ) end @@ -155,14 +158,46 @@ agent.prompt end + + context 'when resource is a blob' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:blob) { project.repository.blob_at("master", "README") } + + let(:extra_resource) { { blob: blob } } + let(:injected_prompt) do + "The current code file that user sees is #{blob.path} and has the following content\n#{blob.data}" + end + + before do + stub_feature_flags(explain_current_blob: user) + end + + it 'includes the blob data in the prompt' do + expect(agent.prompt[:prompt]).to include injected_prompt + end + + context 'when the feature flag explain_current_blob is disabled for current user' do + let(:other_user) { create(:user) } + + before do + stub_feature_flags(explain_current_blob: other_user) + end + + it 'omits the blob data in the prompt' do + expect(agent.prompt[:prompt]).to exclude injected_prompt + end 
+ end + end end describe 'real requests', :real_ai_request, :saas do using RSpec::Parameterized::TableSyntax let_it_be_with_reload(:group) { create(:group_with_plan, :public, plan: :ultimate_plan) } - let_it_be(:project) { create(:project, group: group) } + let_it_be(:project) { create(:project, :repository, group: group) } + let(:resource) { user } + let(:extra_resource) { {} } let(:executor) do ai_request = ::Gitlab::Llm::Chain::Requests::Anthropic.new(user) @@ -170,7 +205,8 @@ current_user: user, container: resource.try(:resource_parent)&.root_ancestor, resource: resource, - ai_request: ai_request + ai_request: ai_request, + extra_resource: extra_resource ) described_class.new( @@ -200,6 +236,51 @@ end end + context 'with blob as resource' do + let(:blob) { project.repository.blob_at("master", "files/ruby/popen.rb") } + let(:extra_resource) { { blob: blob } } + + context 'when the feature flag :explain_current_blob is enabled for user' do + where(:input_template, :tools, :answer_match) do + 'Explain the code' | [] | /ruby|popen/i + 'Explain this code' | [] | /ruby|popen/i + 'What is this code doing?' | [] | /ruby|popen/i + 'Can you explain the code ""def hello_world\\nputs(\""Hello, world!\\n\"");\nend""?' | [] | /hello/i + end + + with_them do + let(:input) { input_template } + + before do + stub_feature_flags(explain_current_blob: user) + end + + it_behaves_like 'successful prompt processing' + end + end + + context 'when the feature flag :explain_current_blob is disabled for user' do + let_it_be(:other_user) { create(:user) } + + where(:input_template, :tools, :answer_match) do + 'Explain the code' | [] | /react/i # Hallucinates by making up react code + 'Explain this code' | [] | /react/i # Hallucinates by making up react code + 'What is this code doing?' | [] | /react/i # Hallucinates by making up react code + 'Can you explain the code ""def hello_world\\nputs(\""Hello, world!\\n\"");\nend""?' 
| [] | /hello/i + end + + with_them do + let(:input) { input_template } + + before do + stub_feature_flags(explain_current_blob: other_user) + end + + it_behaves_like 'successful prompt processing' + end + end + end + context 'with predefined issue', time_travel_to: Time.utc(2023, 8, 11) do let_it_be(:label) { create(:label, project: project, title: 'ai-enablement') } let_it_be(:milestone) { create(:milestone, project: project, title: 'milestone1', due_date: 3.days.from_now) } diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb index 3631a5c7b9d0819ee3c93a12478b14d0a31f4003..0dd66f65d0f439c1f8b5ff6b3651d76f5cde5fb4 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb @@ -25,7 +25,8 @@ 'request_id' => 'uuid1', 'role' => 'assistant', 'content' => 'response 2', 'timestamp' => Time.current.to_s ) ], - prompt_version: prompt_version + prompt_version: prompt_version, + current_code: "" } end diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb index a10cea46752ae162fc50565ba891216d0f560778..b3f00189ee552f99ad81cbc1d07381c54a82bb9f 100644 --- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb @@ -10,7 +10,8 @@ tool_names: "tool names", user_input: 'foo?', agent_scratchpad: "some observation", - prompt_version: ::Gitlab::Llm::Chain::Agents::ZeroShot::Executor::PROMPT_TEMPLATE + prompt_version: ::Gitlab::Llm::Chain::Agents::ZeroShot::Executor::PROMPT_TEMPLATE, + current_code: "" } prompt = described_class.prompt(options)[:prompt] prompt_text = "Answer the question as accurate as you can." 
diff --git a/ee/spec/lib/gitlab/llm/chain/tools/issue_identifier/executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/tools/issue_identifier/executor_spec.rb index b099045e92ac6d6b344f78f5be9cbfa91e2fc687..2943aae7e57284de1547d691e52b051059643073 100644 --- a/ee/spec/lib/gitlab/llm/chain/tools/issue_identifier/executor_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/tools/issue_identifier/executor_spec.rb @@ -3,13 +3,18 @@ require 'spec_helper' RSpec.describe Gitlab::Llm::Chain::Tools::IssueIdentifier::Executor, feature_category: :shared do - RSpec.shared_examples 'success response' do + RSpec.shared_examples 'success response' do |ff_off| it 'returns success response' do ai_request = double allow(ai_request).to receive(:request).and_return(ai_response) allow(context).to receive(:ai_request).and_return(ai_request) - response = "I identified the issue #{identifier}." + response = if ff_off + "I identified the issue #{identifier}." + else + "I identified the issue #{identifier}. For more information use ResourceReader." 
+ end + expect(tool.execute.content).to eq(response) end end @@ -139,6 +144,21 @@ it_behaves_like 'success response' end + context 'when push_ai_to_load_identified_issue_json FF is disabled' do + before do + stub_feature_flags(push_ai_to_load_identified_issue_json: false) + end + + context 'when is issue identified with reference' do + let(:identifier) { issue2.to_reference(full: true) } + let(:ai_response) do + "reference\", \"ResourceIdentifier\": \"#{identifier}\"}" + end + + it_behaves_like 'success response', true + end + end + # Skipped pending https://gitlab.com/gitlab-org/gitlab/-/issues/413509 xcontext 'when is issue identified with url' do let(:identifier) { Gitlab::Saas.com_url + Gitlab::Routing.url_helpers.project_issue_path(project, issue2) } diff --git a/ee/spec/lib/gitlab/llm/chain/utils/prompt_spec.rb b/ee/spec/lib/gitlab/llm/chain/utils/prompt_spec.rb index 8abed54a260fb09a13f0591894e3a13c89086c00..db257a22f01c7e92f45c444cd57e472cee433f03 100644 --- a/ee/spec/lib/gitlab/llm/chain/utils/prompt_spec.rb +++ b/ee/spec/lib/gitlab/llm/chain/utils/prompt_spec.rb @@ -38,4 +38,20 @@ expect(described_class.role_conversation([prompt], input_vars)).to eq([result].to_json) end end + + describe "#default_system_prompt" do + let(:explain_current_blob) do + "You can explain code if the user provided a code snippet and answer directly." 
+ end + + it 'includes the prompt to explain code directly' do + expect(described_class.default_system_prompt(explain_current_blob: true)).to include explain_current_blob + end + + context 'when explain_current_blob is false by default' do + it 'omits the prompt to explain code directly' do + expect(described_class.default_system_prompt).to exclude explain_current_blob + end + end + end end diff --git a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb index f3081837aa334970793465c29fb86deeee3af8cc..6bf8e12d446c232ba0e6995cb54bc8a2b051de3e 100644 --- a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb +++ b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb @@ -3,15 +3,20 @@ require 'spec_helper' RSpec.describe Gitlab::Llm::Completions::Chat, feature_category: :shared do + include FakeBlobHelpers + let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project, group: group) } - let_it_be(:resource) { create(:issue, project: project) } + let_it_be(:project) { create(:project, :repository, group: group) } + let_it_be(:issue) { create(:issue, project: project) } + let(:resource) { issue } let(:expected_container) { group } let(:content) { 'Summarize issue' } let(:ai_request) { instance_double(Gitlab::Llm::Chain::Requests::Anthropic) } - let(:options) { { content: content } } + let(:blob) { fake_blob(path: 'file.md') } + let(:extra_resource) { { blob: blob } } + let(:options) { { request_id: 'uuid', content: content, extra_resource: extra_resource } } let(:container) { group } let(:context) do instance_double( @@ -51,7 +56,8 @@ .to receive(:increment) .with(labels: { tool: "IssueIdentifier" }, success: true) expect(::Gitlab::Llm::Chain::GitlabContext).to receive(:new) - .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request) + .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request, + 
extra_resource: extra_resource) .and_return(context) subject @@ -143,7 +149,8 @@ end expect(::Gitlab::Llm::Chain::GitlabContext).to receive(:new) - .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request) + .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request, + extra_resource: extra_resource) .and_return(context) subject diff --git a/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb b/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb index 36f5d603820eb4ca8aa7dfd1ce0e0bbaa8257396..1cd83b6ca64dc1f058b2891a599db0927fafa81c 100644 --- a/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb +++ b/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb @@ -34,7 +34,7 @@ expect(Llm::CompletionWorker).to receive(:perform_async).with( current_user.id, nil, nil, :chat, { content: "summarize", markup_format: :raw, request_id: an_instance_of(String), - cache_response: true, emit_user_messages: true + cache_response: true, emit_user_messages: true, referer_url: nil } ) @@ -47,7 +47,7 @@ expect(Llm::CompletionWorker).to receive(:perform_async).with( current_user.id, resource.id, "Issue", :chat, { content: "summarize", markup_format: :raw, request_id: an_instance_of(String), - cache_response: true, emit_user_messages: true + cache_response: true, emit_user_messages: true, referer_url: nil } ) @@ -64,7 +64,7 @@ expect(Llm::CompletionWorker).to receive(:perform_async).with( current_user.id, current_user.id, "User", :chat, { content: "summarize", markup_format: :raw, request_id: an_instance_of(String), - cache_response: true, emit_user_messages: true + cache_response: true, emit_user_messages: true, referer_url: nil } ) diff --git a/ee/spec/workers/llm/completion_worker_spec.rb b/ee/spec/workers/llm/completion_worker_spec.rb index f88d3551955884857c59027fc70074f1726c578e..c198eb3c06721ae0929719d7d107147fc3e00103 100644 --- 
a/ee/spec/workers/llm/completion_worker_spec.rb +++ b/ee/spec/workers/llm/completion_worker_spec.rb @@ -3,6 +3,7 @@ require 'spec_helper' RSpec.describe Llm::CompletionWorker, feature_category: :team_planning do + include FakeBlobHelpers include AfterNextHelpers it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed @@ -19,13 +20,22 @@ let(:options) { { 'key' => 'value' } } let(:ai_template) { { method: :completions, prompt: 'something', options: { temperature: 0.7 } } } let(:ai_action_name) { :summarize_comments } - let(:params) { options.merge(request_id: 'uuid', internal_request: true, cache_response: false) } + let(:referer_url) { nil } + let(:extra_resource) { {} } + + let(:params) do + options.merge(request_id: 'uuid', internal_request: true, cache_response: false, referer_url: referer_url) + end subject { described_class.new.perform(user_id, resource_id, resource_type, ai_action_name, params) } shared_examples 'performs successfully' do it 'calls Gitlab::Llm::CompletionsFactory' do completion = instance_double(Gitlab::Llm::Completions::SummarizeAllOpenNotes) + extra_resource_finder = instance_double(::Llm::ExtraResourceFinder) + + expect(::Llm::ExtraResourceFinder).to receive(:new).with(user, referer_url).and_return(extra_resource_finder) + expect(extra_resource_finder).to receive(:execute).and_return(extra_resource) expect(Gitlab::Llm::CompletionsFactory) .to receive(:completion) @@ -34,7 +44,7 @@ expect(completion) .to receive(:execute) - .with(user, resource, options.symbolize_keys) + .with(user, resource, options.symbolize_keys.merge(extra_resource: extra_resource)) subject end @@ -45,6 +55,13 @@ group.add_reporter(user) end + context 'when extra resource is found' do + let(:referer_url) { "foobar" } + let(:extra_resource) { { blob: fake_blob(path: 'file.md') } } + + it_behaves_like 'performs successfully' + end + context 'for an issue' do let_it_be(:resource) { create(:issue, project: project) } diff --git 
a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
index 5de8e8cdca2f4976b432dd9db9f2c74b8f03c390..e7dd9cea9229f264d1de826e5c3c78d97dc1768f 100644
--- a/spec/support/finder_collection_allowlist.yml
+++ b/spec/support/finder_collection_allowlist.yml
@@ -7,6 +7,7 @@
 - Namespaces::FreeUserCap::UsersFinder # Reason: There is no need to have anything else besides the count
 - Groups::EnvironmentScopesFinder # Reason: There is no need to have anything else besides the simple strucutre with the scope name
 - Security::RelatedPipelinesFinder # Reason: There is no need to have anything else besides the IDs of pipelines
+- Llm::ExtraResourceFinder # Reason: The finder does not deal with DB-backed resources for now.
 
 # Temporary excludes (aka TODOs)
 # For example: