feat: Add support for more tools, standardize copilot chat service (#11560)

This commit is contained in:
Pranav
2025-05-23 01:07:07 -07:00
committed by GitHub
parent d40a59f7fa
commit 03c0a7c62e
12 changed files with 334 additions and 107 deletions

View File

@@ -19,9 +19,9 @@ module Enterprise::Api::V1::Accounts::ConversationsController
response = Captain::Copilot::ChatService.new(
assistant,
previous_messages: copilot_params[:previous_messages],
conversation_history: @conversation.to_llm_text,
language: @conversation.account.locale_english_name
previous_history: copilot_params[:previous_history],
conversation_id: @conversation.display_id,
user_id: Current.user.id
).generate_response(copilot_params[:message])
render json: { message: response['response'] }
@@ -44,6 +44,6 @@ module Enterprise::Api::V1::Accounts::ConversationsController
private
def copilot_params
params.permit(:previous_messages, :message, :assistant_id)
params.permit(:previous_history, :message, :assistant_id)
end
end

View File

@@ -1,6 +1,6 @@
module Captain::ChatHelper
def request_chat_completion
Rails.logger.debug { "[CAPTAIN][ChatCompletion] #{@messages}" }
log_chat_completion_request
response = @client.chat(
parameters: {
@@ -15,13 +15,17 @@ module Captain::ChatHelper
handle_response(response)
end
private
def handle_response(response)
Rails.logger.debug { "[CAPTAIN][ChatCompletion] #{response}" }
Rails.logger.debug { "#{self.class.name} Assistant: #{@assistant.id}, Received response #{response}" }
message = response.dig('choices', 0, 'message')
if message['tool_calls']
process_tool_calls(message['tool_calls'])
else
JSON.parse(message['content'].strip)
message = JSON.parse(message['content'].strip)
persist_message(message, 'assistant')
message
end
end
@@ -41,12 +45,14 @@ module Captain::ChatHelper
if @tool_registry.respond_to?(function_name)
execute_tool(function_name, arguments, tool_call_id)
else
process_invalid_tool_call(tool_call_id)
process_invalid_tool_call(function_name, tool_call_id)
end
end
def execute_tool(function_name, arguments, tool_call_id)
persist_message({ content: "Using tool #{function_name}", function_name: function_name }, 'assistant_thinking')
result = @tool_registry.send(function_name, arguments)
persist_message({ content: "Completed #{function_name} tool call", function_name: function_name }, 'assistant_thinking')
append_tool_response(result, tool_call_id)
end
@@ -57,7 +63,8 @@ module Captain::ChatHelper
}
end
def process_invalid_tool_call(tool_call_id)
def process_invalid_tool_call(function_name, tool_call_id)
persist_message({ content: 'Invalid tool call', function_name: function_name }, 'assistant_thinking')
append_tool_response('Tool not available', tool_call_id)
end
@@ -68,4 +75,12 @@ module Captain::ChatHelper
content: content
}
end
def log_chat_completion_request
Rails.logger.info(
"#{self.class.name} Assistant: #{@assistant.id}, Requesting chat completion
for messages #{@messages} with #{@tool_registry&.registered_tools&.length || 0} tools
"
)
end
end

View File

@@ -25,6 +25,7 @@ class CopilotMessage < ApplicationRecord
validates :message_type, presence: true, inclusion: { in: message_types.keys }
validates :message, presence: true
validate :validate_message_attributes
after_create_commit :broadcast_message
@@ -47,4 +48,13 @@ class CopilotMessage < ApplicationRecord
def broadcast_message
Rails.configuration.dispatcher.dispatch(COPILOT_MESSAGE_CREATED, Time.zone.now, copilot_message: self)
end
def validate_message_attributes
return if message.blank?
allowed_keys = %w[content reasoning function_name]
invalid_keys = message.keys - allowed_keys
errors.add(:message, "contains invalid attributes: #{invalid_keys.join(', ')}") if invalid_keys.any?
end
end

View File

@@ -40,7 +40,7 @@ class CopilotThread < ApplicationRecord
.order(created_at: :asc)
.map do |copilot_message|
{
content: copilot_message.message,
content: copilot_message.message['content'],
role: copilot_message.message_type
}
end

View File

@@ -6,7 +6,6 @@ module Enterprise::Concerns::User
has_many :captain_responses, class_name: 'Captain::AssistantResponse', dependent: :nullify, as: :documentable
has_many :copilot_threads, dependent: :destroy_async
has_many :copilot_messages, dependent: :destroy_async
end
def ensure_installation_pricing_plan_quantity

View File

@@ -3,49 +3,110 @@ require 'openai'
class Captain::Copilot::ChatService < Llm::BaseOpenAiService
include Captain::ChatHelper
attr_reader :assistant, :account, :user, :copilot_thread, :previous_history, :messages
def initialize(assistant, config)
super()
@assistant = assistant
@conversation_history = config[:conversation_history]
@previous_messages = config[:previous_messages] || []
@language = config[:language] || 'english'
@account = assistant.account
@user = nil
@copilot_thread = nil
@previous_history = []
setup_user(config)
setup_message_history(config)
register_tools
@messages = [system_message, conversation_history_context] + @previous_messages
@response = ''
@messages = build_messages(config)
end
def generate_response(input)
@messages << { role: 'user', content: input } if input.present?
response = request_chat_completion
Rails.logger.info("[CAPTAIN][CopilotChatService] Incrementing response usage for #{@assistant.account.id}")
@assistant.account.increment_response_usage
Rails.logger.debug { "#{self.class.name} Assistant: #{@assistant.id}, Received response #{response}" }
Rails.logger.info(
"#{self.class.name} Assistant: #{@assistant.id}, Incrementing response usage for account #{@account.id}"
)
@account.increment_response_usage
response
end
private
def setup_user(config)
@user = @account.users.find_by(id: config[:user_id]) if config[:user_id].present?
end
def build_messages(config)
messages= [system_message]
messages << account_id_context
messages += @previous_history if @previous_history.present?
messages += current_viewing_history(config[:conversation_id]) if config[:conversation_id].present?
messages
end
def setup_message_history(config)
Rails.logger.info(
"#{self.class.name} Assistant: #{@assistant.id}, Previous History: #{config[:previous_history]&.length || 0}, Language: #{config[:language]}"
)
@copilot_thread = @account.copilot_threads.find_by(id: config[:thread_id]) if config[:thread_id].present?
@previous_history = if @copilot_thread.present?
@copilot_thread.previous_history
else
config[:previous_history].presence || []
end
end
def register_tools
@tool_registry = Captain::ToolRegistryService.new(@assistant)
@tool_registry = Captain::ToolRegistryService.new(@assistant, user: @user)
@tool_registry.register_tool(Captain::Tools::SearchDocumentationService)
@tool_registry.register_tool(Captain::Tools::Copilot::GetArticleService)
@tool_registry.register_tool(Captain::Tools::Copilot::GetContactService)
@tool_registry.register_tool(Captain::Tools::Copilot::GetConversationService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchArticlesService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchContactsService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchConversationsService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchLinearIssuesService)
end
def system_message
{
role: 'system',
content: Captain::Llm::SystemPromptsService.copilot_response_generator(@assistant.config['product_name'], @language)
content: Captain::Llm::SystemPromptsService.copilot_response_generator(@assistant.config['product_name'])
}
end
def conversation_history_context
def account_id_context
{
role: 'system',
content: "
Message History with the user is below:
#{@conversation_history}
"
content: "The current account id is #{@account.id}. The account is using #{@account.locale_english_name} as the language."
}
end
def current_viewing_history(conversation_id)
conversation = @account.conversations.find_by(display_id: conversation_id)
return [] unless conversation
Rails.logger.info("#{self.class.name} Assistant: #{@assistant.id}, Setting viewing history for conversation_id=#{conversation_id}")
contact_id = conversation.contact_id
[{
role: 'system',
content: <<~HISTORY.strip
You are currently viewing the conversation with the following details:
Conversation ID: #{conversation_id}
Contact ID: #{contact_id}
HISTORY
}]
end
def persist_message(message, message_type = 'assistant')
return if @copilot_thread.blank?
@copilot_thread.copilot_messages.create!(
message: message,
message_type: message_type
)
end
end

View File

@@ -21,7 +21,7 @@ class Captain::Llm::AssistantChatService < Llm::BaseOpenAiService
private
def register_tools
@tool_registry = Captain::ToolRegistryService.new(@assistant)
@tool_registry = Captain::ToolRegistryService.new(@assistant, user: nil)
@tool_registry.register_tool(Captain::Tools::SearchDocumentationService)
end
@@ -31,4 +31,8 @@ class Captain::Llm::AssistantChatService < Llm::BaseOpenAiService
content: Captain::Llm::SystemPromptsService.assistant_response_generator(@assistant.name, @assistant.config['product_name'], @assistant.config)
}
end
def persist_message(message, message_type = 'assistant')
# No need to implement
end
end

View File

@@ -56,18 +56,18 @@ class Captain::Llm::SystemPromptsService
SYSTEM_PROMPT_MESSAGE
end
def copilot_response_generator(product_name, language)
def copilot_response_generator(product_name)
<<~SYSTEM_PROMPT_MESSAGE
[Identity]
You are Captain, a helpful and friendly copilot assistant for support agents using the product #{product_name}. Your primary role is to assist support agents by retrieving information, compiling accurate responses, and guiding them through customer interactions.
You should only provide information related to #{product_name} and must not address queries about other products or external events.
[Context]
You will be provided with the message history between the support agent and the customer. Use this context to understand the conversation flow, identify unresolved queries, and ensure responses are relevant and consistent with previous interactions. Always maintain a coherent and professional tone throughout the conversation.
Identify unresolved queries, and ensure responses are relevant and consistent with previous interactions. Always maintain a coherent and professional tone throughout the conversation.
[Response Guidelines]
- Use natural, polite, and conversational language that is clear and easy to follow. Keep sentences short and use simple words.
- Reply in the language the agent is using, if you're not able to detect the language, reply in #{language}.
- Reply in the language the agent is using, if you're not able to detect the language.
- Provide brief and relevant responses — typically one or two sentences unless a more detailed explanation is necessary.
- Do not use your own training data or assumptions to answer queries. Base responses strictly on the provided information.
- If the query is unclear, ask concise clarifying questions instead of making assumptions.

View File

@@ -46,7 +46,7 @@ class Captain::Tools::Copilot::SearchLinearIssuesService < Captain::Tools::BaseS
end
def active?
@assistant.account.hooks.find_by(app_id: 'linear').present?
@user.present? && @assistant.account.hooks.find_by(app_id: 'linear').present?
end
private

View File

@@ -47,9 +47,9 @@ RSpec.describe CopilotThread, type: :model do
expect(history.length).to eq(2)
expect(history[0][:role]).to eq('user')
expect(history[0][:content]).to eq({ 'content' => 'User message' })
expect(history[0][:content]).to eq('User message')
expect(history[1][:role]).to eq('assistant')
expect(history[1][:content]).to eq({ 'content' => 'Assistant message' })
expect(history[1][:content]).to eq('Assistant message')
end
end

View File

@@ -2,87 +2,245 @@ require 'rails_helper'
RSpec.describe Captain::Copilot::ChatService do
let(:account) { create(:account, custom_attributes: { plan_name: 'startups' }) }
let(:captain_inbox_association) { create(:captain_inbox, captain_assistant: assistant, inbox: inbox) }
let(:mock_captain_agent) { instance_double(Captain::Agent) }
let(:mock_captain_tool) { instance_double(Captain::Tool) }
let(:mock_openai_client) { instance_double(OpenAI::Client) }
let(:user) { create(:user, account: account) }
let(:inbox) { create(:inbox, account: account) }
let(:assistant) { create(:captain_assistant, account: account) }
let(:contact) { create(:contact, account: account) }
let(:conversation) { create(:conversation, account: account, inbox: inbox, contact: contact) }
let(:mock_openai_client) { instance_double(OpenAI::Client) }
let(:copilot_thread) { create(:captain_copilot_thread, account: account, user: user) }
let!(:copilot_message) do
create(
:captain_copilot_message, account: account, copilot_thread: copilot_thread
)
end
let(:previous_history) { [{ role: copilot_message.message_type, content: copilot_message.message['content'] }] }
let(:config) do
{ user_id: user.id, thread_id: copilot_thread.id, conversation_id: conversation.display_id }
end
before do
create(:installation_config, name: 'CAPTAIN_OPEN_AI_API_KEY', value: 'test-key')
allow(OpenAI::Client).to receive(:new).and_return(mock_openai_client)
allow(mock_openai_client).to receive(:chat).and_return({
choices: [{ message: { content: '{ "content": "Hey" }' } }]
}.with_indifferent_access)
end
describe '#initialize' do
it 'sets default language to english when not specified' do
service = described_class.new(assistant, { previous_messages: [], conversation_history: '' })
expect(service.instance_variable_get(:@language)).to eq('english')
it 'sets up the service with correct instance variables' do
service = described_class.new(assistant, config)
expect(service.assistant).to eq(assistant)
expect(service.account).to eq(account)
expect(service.user).to eq(user)
expect(service.copilot_thread).to eq(copilot_thread)
expect(service.previous_history).to eq(previous_history)
end
it 'uses the specified language when provided' do
service = described_class.new(assistant, {
previous_messages: [],
conversation_history: '',
language: 'spanish'
})
expect(service.instance_variable_get(:@language)).to eq('spanish')
it 'builds messages with system message and account context' do
service = described_class.new(assistant, config)
messages = service.messages
expect(messages.first[:role]).to eq('system')
expect(messages.second[:role]).to eq('system')
expect(messages.second[:content]).to include(account.id.to_s)
end
end
describe '#generate_response' do
before do
allow(OpenAI::Client).to receive(:new).and_return(mock_openai_client)
allow(mock_openai_client).to receive(:chat).and_return({ choices: [{ message: { content: '{ "result": "Hey" }' } }] }.with_indifferent_access)
let(:service) { described_class.new(assistant, config) }
allow(Captain::Agent).to receive(:new).and_return(mock_captain_agent)
allow(mock_captain_agent).to receive(:execute).and_return(true)
allow(mock_captain_agent).to receive(:register_tool).and_return(true)
it 'adds user input to messages when present' do
expect do
service.generate_response('Hello')
end.to(change { service.messages.count }.by(1))
allow(Captain::Tool).to receive(:new).and_return(mock_captain_tool)
allow(mock_captain_tool).to receive(:register_method).and_return(true)
allow(account).to receive(:increment_response_usage).and_return(true)
last_message = service.messages.last
expect(last_message[:role]).to eq('user')
expect(last_message[:content]).to eq('Hello')
end
it 'increments usage' do
described_class.new(assistant, { previous_messages: ['Hello'], conversation_history: 'Hi' }).generate_response('Hey')
expect(account).to have_received(:increment_response_usage).once
it 'does not add user input to messages when blank' do
expect do
service.generate_response('')
end.not_to(change { service.messages.count })
end
it 'includes language in system message' do
service = described_class.new(assistant, {
previous_messages: [],
conversation_history: '',
language: 'spanish'
})
it 'returns the response from request_chat_completion' do
expect(service.generate_response('Hello')).to eq({ 'content' => 'Hey' })
end
allow(Captain::Llm::SystemPromptsService).to receive(:copilot_response_generator)
.with(assistant.config['product_name'], 'spanish')
.and_return('Spanish system prompt')
context 'when response contains tool calls' do
before do
allow(mock_openai_client).to receive(:chat).and_return(
{
choices: [{ message: { 'tool_calls' => tool_calls } }]
}.with_indifferent_access,
{
choices: [{ message: { content: '{ "content": "Tool response processed" }' } }]
}.with_indifferent_access
)
end
system_message = service.send(:system_message)
expect(system_message[:content]).to eq('Spanish system prompt')
context 'when tool call is valid' do
let(:tool_calls) do
[{
'id' => 'call_123',
'function' => {
'name' => 'get_conversation',
'arguments' => "{ \"conversation_id\": #{conversation.display_id} }"
}
}]
end
it 'processes tool calls and appends them to messages' do
result = service.generate_response("Find conversation #{conversation.id}")
expect(result).to eq({ 'content' => 'Tool response processed' })
expect(service.messages).to include(
{ role: 'assistant', tool_calls: tool_calls }
)
expect(service.messages).to include(
{
role: 'tool', tool_call_id: 'call_123', content: conversation.to_llm_text
}
)
expect(result).to eq({ 'content' => 'Tool response processed' })
end
end
context 'when tool call is invalid' do
let(:tool_calls) do
[{
'id' => 'call_123',
'function' => {
'name' => 'get_settings',
'arguments' => '{}'
}
}]
end
it 'handles invalid tool calls' do
result = service.generate_response('Find settings')
expect(result).to eq({ 'content' => 'Tool response processed' })
expect(service.messages).to include(
{
role: 'assistant', tool_calls: tool_calls
}
)
expect(service.messages).to include(
{
role: 'tool',
tool_call_id: 'call_123',
content: 'Tool not available'
}
)
end
end
end
end
describe '#execute' do
before do
allow(OpenAI::Client).to receive(:new).and_return(mock_openai_client)
allow(mock_openai_client).to receive(:chat).and_return({ choices: [{ message: { content: '{ "result": "Hey" }' } }] }.with_indifferent_access)
allow(Captain::Agent).to receive(:new).and_return(mock_captain_agent)
allow(mock_captain_agent).to receive(:execute).and_return(true)
allow(mock_captain_agent).to receive(:register_tool).and_return(true)
allow(Captain::Tool).to receive(:new).and_return(mock_captain_tool)
allow(mock_captain_tool).to receive(:register_method).and_return(true)
allow(account).to receive(:increment_response_usage).and_return(true)
describe '#setup_user' do
it 'sets user when user_id is present in config' do
service = described_class.new(assistant, { user_id: user.id })
expect(service.user).to eq(user)
end
it 'increments usage' do
described_class.new(assistant, { previous_messages: ['Hello'], conversation_history: 'Hi' }).generate_response('Hey')
expect(account).to have_received(:increment_response_usage).once
it 'does not set user when user_id is not present in config' do
service = described_class.new(assistant, {})
expect(service.user).to be_nil
end
end
describe '#setup_message_history' do
context 'when thread_id is present' do
it 'finds the copilot thread and sets previous history from it' do
service = described_class.new(assistant, { thread_id: copilot_thread.id })
expect(service.copilot_thread).to eq(copilot_thread)
expect(service.previous_history).to eq previous_history
end
end
context 'when thread_id is not present' do
it 'uses previous_history from config if present' do
custom_history = [{ role: 'user', content: 'Custom message' }]
service = described_class.new(assistant, { previous_history: custom_history })
expect(service.copilot_thread).to be_nil
expect(service.previous_history).to eq(custom_history)
end
it 'uses empty array if previous_history is not present in config' do
service = described_class.new(assistant, {})
expect(service.copilot_thread).to be_nil
expect(service.previous_history).to eq([])
end
end
end
describe '#build_messages' do
it 'includes system message and account context' do
service = described_class.new(assistant, {})
messages = service.messages
expect(messages.first[:role]).to eq('system')
expect(messages.second[:role]).to eq('system')
expect(messages.second[:content]).to include(account.id.to_s)
end
it 'includes previous history when present' do
custom_history = [{ role: 'user', content: 'Custom message' }]
service = described_class.new(assistant, { previous_history: custom_history })
messages = service.messages
expect(messages.count).to be >= 3
expect(messages.any? { |m| m[:content] == 'Custom message' }).to be true
end
it 'includes current viewing history when conversation_id is present' do
service = described_class.new(assistant, { conversation_id: conversation.display_id })
messages = service.messages
viewing_history = messages.find { |m| m[:content].include?('You are currently viewing the conversation') }
expect(viewing_history).not_to be_nil
expect(viewing_history[:content]).to include(conversation.display_id.to_s)
expect(viewing_history[:content]).to include(contact.id.to_s)
end
end
describe '#persist_message' do
context 'when copilot_thread is present' do
it 'creates a copilot message' do
allow(mock_openai_client).to receive(:chat).and_return({
choices: [{ message: { content: '{ "content": "Hey" }' } }]
}.with_indifferent_access)
expect do
described_class.new(assistant, { thread_id: copilot_thread.id }).generate_response('Hello')
end.to change(CopilotMessage, :count).by(1)
last_message = CopilotMessage.last
expect(last_message.message_type).to eq('assistant')
expect(last_message.message['content']).to eq('Hey')
end
end
context 'when copilot_thread is not present' do
it 'does not create a copilot message' do
allow(mock_openai_client).to receive(:chat).and_return({
choices: [{ message: { content: '{ "content": "Hey" }' } }]
}.with_indifferent_access)
expect do
described_class.new(assistant, {}).generate_response('Hello')
end.not_to(change(CopilotMessage, :count))
end
end
end
end

View File

@@ -203,24 +203,4 @@ describe ActionCableListener do
listener.conversation_updated(event)
end
end
describe '#copilot_message_created' do
let(:event_name) { :copilot_message_created }
let(:account) { create(:account) }
let(:user) { create(:user, account: account) }
let(:assistant) { create(:captain_assistant, account: account) }
let(:copilot_thread) { create(:captain_copilot_thread, account: account, user: user, assistant: assistant) }
let(:copilot_message) { create(:captain_copilot_message, copilot_thread: copilot_thread) }
let(:event) { Events::Base.new(event_name, Time.zone.now, copilot_message: copilot_message) }
it 'broadcasts message to the user' do
expect(ActionCableBroadcastJob).to receive(:perform_later).with(
[user.pubsub_token],
'copilot.message.created',
copilot_message.push_event_data
)
listener.copilot_message_created(event)
end
end
end