feat: Migrate ruby llm captain (#12981)

Co-authored-by: aakashb95 <aakash@chatwoot.com>
Co-authored-by: Shivam Mishra <scm.mymail@gmail.com>
This commit is contained in:
Aakash Bakhle
2025-12-04 18:26:10 +05:30
committed by GitHub
parent 0a17976913
commit eed2eaceb0
41 changed files with 474 additions and 734 deletions

View File

@@ -88,7 +88,7 @@ Metrics/ModuleLength:
Rails/HelperInstanceVariable:
Exclude:
- enterprise/app/helpers/captain/chat_helper.rb
- 'enterprise/app/helpers/captain/tool_execution_helper.rb'
- enterprise/app/helpers/captain/chat_response_helper.rb
Rails/ApplicationController:
Exclude:
- 'app/controllers/api/v1/widget/messages_controller.rb'

View File

@@ -1,31 +1,83 @@
module Captain::ChatHelper
include Integrations::LlmInstrumentation
include Captain::ToolExecutionHelper
include Captain::ChatResponseHelper
def request_chat_completion
log_chat_completion_request
chat = build_chat
add_messages_to_chat(chat)
with_agent_session do
response = instrument_llm_call(instrumentation_params) do
@client.chat(
parameters: chat_parameters
)
end
handle_response(response)
response = chat.ask(conversation_messages.last[:content])
build_response(response)
end
rescue StandardError => e
Rails.logger.error "#{self.class.name} Assistant: #{@assistant.id}, Error in chat completion: #{e}"
raise e
end
def instrumentation_params
private
def build_chat
llm_chat = chat(model: @model, temperature: temperature)
llm_chat.with_params(response_format: { type: 'json_object' })
llm_chat = setup_tools(llm_chat)
setup_system_instructions(llm_chat)
setup_event_handlers(llm_chat)
llm_chat
end
def setup_tools(chat)
@tools&.each do |tool|
chat.with_tool(tool)
end
chat
end
def setup_system_instructions(chat)
system_messages = @messages.select { |m| m[:role] == 'system' || m[:role] == :system }
combined_instructions = system_messages.pluck(:content).join("\n\n")
chat.with_instructions(combined_instructions)
end
def setup_event_handlers(chat)
chat.on_new_message { start_llm_turn_span(instrumentation_params(chat)) }
chat.on_end_message { |message| end_llm_turn_span(message) }
chat.on_tool_call { |tool_call| handle_tool_call(tool_call) }
chat.on_tool_result { |result| handle_tool_result(result) }
chat
end
def handle_tool_call(tool_call)
persist_thinking_message(tool_call)
start_tool_span(tool_call)
@pending_tool_calls ||= []
@pending_tool_calls.push(tool_call)
end
def handle_tool_result(result)
end_tool_span(result)
persist_tool_completion
end
def add_messages_to_chat(chat)
conversation_messages[0...-1].each do |msg|
chat.add_message(role: msg[:role].to_sym, content: msg[:content])
end
end
def instrumentation_params(chat = nil)
{
span_name: "llm.captain.#{feature_name}",
account_id: resolved_account_id,
conversation_id: @conversation_id,
feature_name: feature_name,
model: @model,
messages: @messages,
messages: chat ? chat.messages.map { |m| { role: m.role.to_s, content: m.content.to_s } } : @messages,
temperature: temperature,
metadata: {
assistant_id: @assistant&.id
@@ -33,14 +85,8 @@ module Captain::ChatHelper
}
end
def chat_parameters
{
model: @model,
messages: @messages,
tools: @tool_registry&.registered_tools || [],
response_format: { type: 'json_object' },
temperature: temperature
}
def conversation_messages
@messages.reject { |m| m[:role] == 'system' || m[:role] == :system }
end
def temperature
@@ -51,8 +97,6 @@ module Captain::ChatHelper
@account&.id || @assistant&.account_id
end
private
# Ensures all LLM calls and tool executions within an agentic loop
# are grouped under a single trace/session in Langfuse.
#
@@ -78,7 +122,7 @@ module Captain::ChatHelper
def log_chat_completion_request
Rails.logger.info(
"#{self.class.name} Assistant: #{@assistant.id}, Requesting chat completion
for messages #{@messages} with #{@tool_registry&.registered_tools&.length || 0} tools
for messages #{@messages} with #{@tools&.length || 0} tools
"
)
end

View File

@@ -0,0 +1,52 @@
# Shared helpers for turning a RubyLLM chat response into the JSON payload
# Captain expects, and for persisting "thinking" progress messages while
# tools execute. Expects the including class to provide: persist_message,
# @assistant, @copilot_thread and (for completions) @pending_tool_calls.
module Captain::ChatResponseHelper
  private

  # Parses the LLM response content as JSON, persists it as an assistant
  # message, and returns the parsed hash.
  def build_response(response)
    Rails.logger.debug { "#{self.class.name} Assistant: #{@assistant.id}, Received response #{response}" }
    payload = parse_json_response(response.content)
    persist_message(payload, 'assistant')
    payload
  end

  # Strips markdown code fences, then parses the remainder as JSON. On a
  # parse failure the raw (cleaned) text is wrapped under the 'content' key
  # so callers always receive a hash.
  def parse_json_response(content)
    cleaned = content.gsub('```json', '').gsub('```', '').strip
    JSON.parse(cleaned)
  rescue JSON::ParserError => e
    Rails.logger.error "#{self.class.name} Assistant: #{@assistant.id}, Error parsing JSON response: #{e.message}"
    { 'content' => cleaned }
  end

  # Persists a short progress message ("Using <tool>") when a tool call
  # starts. No-op outside of copilot threads.
  def persist_thinking_message(tool_call)
    return if @copilot_thread.blank?

    tool_name = tool_call.name.to_s
    thinking = { 'content' => "Using #{tool_name}", 'function_name' => tool_name }
    persist_message(thinking, 'assistant_thinking')
  end

  # Persists a completion message for the most recently started tool call
  # (LIFO pop from @pending_tool_calls). No-op outside of copilot threads
  # or when no call is pending.
  def persist_tool_completion
    return if @copilot_thread.blank?

    completed_call = @pending_tool_calls&.pop
    return unless completed_call

    tool_name = completed_call.name.to_s
    done = { 'content' => "Completed #{tool_name}", 'function_name' => tool_name }
    persist_message(done, 'assistant_thinking')
  end
end

View File

@@ -1,83 +0,0 @@
# Legacy (OpenAI-gem era) helper that drives the agentic tool loop: it
# inspects a raw chat-completion response, dispatches any tool calls through
# @tool_registry, appends tool results to @messages, and recurses back into
# request_chat_completion until the model returns plain content.
# Requires the including class to provide: request_chat_completion,
# persist_message, instrument_tool_call, @tool_registry, @messages, @assistant.
module Captain::ToolExecutionHelper
private
# Entry point: either processes tool calls (and re-enters the loop) or
# parses the final JSON content, persists it, and returns the parsed hash.
def handle_response(response)
Rails.logger.debug { "#{self.class.name} Assistant: #{@assistant.id}, Received response #{response}" }
message = response.dig('choices', 0, 'message')
if message['tool_calls']
process_tool_calls(message['tool_calls'])
else
# Final assistant turn: content is expected to be a JSON document.
# NOTE(review): JSON.parse raises on malformed content and propagates
# to the caller's rescue — no fallback here, unlike ChatResponseHelper.
message = JSON.parse(message['content'].strip)
persist_message(message, 'assistant')
message
end
end
# Order matters: the assistant's tool_calls message must be appended to
# @messages BEFORE the tool responses, then the loop recurses so the model
# can consume the results.
def process_tool_calls(tool_calls)
append_tool_calls(tool_calls)
tool_calls.each { |tool_call| process_tool_call(tool_call) }
request_chat_completion
end
# Dispatches a single tool call; unknown tool names get a "not available"
# response instead of raising.
def process_tool_call(tool_call)
arguments = JSON.parse(tool_call['function']['arguments'])
function_name = tool_call['function']['name']
tool_call_id = tool_call['id']
if @tool_registry.respond_to?(function_name)
execute_tool(function_name, arguments, tool_call_id)
else
process_invalid_tool_call(function_name, tool_call_id)
end
end
# Persists "using"/"completed" progress markers around the actual call and
# appends the tool's result for the next model turn.
def execute_tool(function_name, arguments, tool_call_id)
persist_tool_status(function_name, 'captain.copilot.using_tool')
result = perform_tool_call(function_name, arguments)
persist_tool_status(function_name, 'captain.copilot.completed_tool_call')
append_tool_response(result, tool_call_id)
end
# Runs the tool inside an instrumentation span; failures are converted to an
# error string so the model can react instead of the loop crashing.
# NOTE(review): `send` with an LLM-chosen name is gated by the respond_to?
# check in process_tool_call; registry should only expose tool methods.
def perform_tool_call(function_name, arguments)
instrument_tool_call(function_name, arguments) do
@tool_registry.send(function_name, arguments)
end
rescue StandardError => e
Rails.logger.error "Tool #{function_name} failed: #{e.message}"
"Error executing #{function_name}: #{e.message}"
end
# Persists a localized progress message for the copilot UI.
def persist_tool_status(function_name, translation_key)
persist_message(
{
content: I18n.t(translation_key, function_name: function_name),
function_name: function_name
},
'assistant_thinking'
)
end
# Records the assistant's tool_calls turn in the transcript.
def append_tool_calls(tool_calls)
@messages << {
role: 'assistant',
tool_calls: tool_calls
}
end
# Unknown tool: surface it in the thread and answer the model with a
# "tool not available" tool response so the conversation can continue.
def process_invalid_tool_call(function_name, tool_call_id)
persist_message(
{ content: I18n.t('captain.copilot.invalid_tool_call'), function_name: function_name },
'assistant_thinking'
)
append_tool_response(I18n.t('captain.copilot.tool_not_available'), tool_call_id)
end
# Records a tool-role message tied back to its originating tool_call_id.
def append_tool_response(content, tool_call_id)
@messages << {
role: 'tool',
tool_call_id: tool_call_id,
content: content
}
end
end

View File

@@ -1,6 +1,4 @@
require 'openai'
class Captain::Copilot::ChatService < Llm::BaseOpenAiService
class Captain::Copilot::ChatService < Llm::BaseAiService
include Captain::ChatHelper
attr_reader :assistant, :account, :user, :copilot_thread, :previous_history, :messages
@@ -14,9 +12,10 @@ class Captain::Copilot::ChatService < Llm::BaseOpenAiService
@copilot_thread = nil
@previous_history = []
@conversation_id = config[:conversation_id]
setup_user(config)
setup_message_history(config)
register_tools
@tools = build_tools
@messages = build_messages(config)
end
@@ -60,16 +59,19 @@ class Captain::Copilot::ChatService < Llm::BaseOpenAiService
end
end
def register_tools
@tool_registry = Captain::ToolRegistryService.new(@assistant, user: @user)
@tool_registry.register_tool(Captain::Tools::SearchDocumentationService)
@tool_registry.register_tool(Captain::Tools::Copilot::GetArticleService)
@tool_registry.register_tool(Captain::Tools::Copilot::GetContactService)
@tool_registry.register_tool(Captain::Tools::Copilot::GetConversationService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchArticlesService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchContactsService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchConversationsService)
@tool_registry.register_tool(Captain::Tools::Copilot::SearchLinearIssuesService)
def build_tools
tools = []
tools << Captain::Tools::SearchDocumentationService.new(@assistant, user: @user)
tools << Captain::Tools::Copilot::GetConversationService.new(@assistant, user: @user)
tools << Captain::Tools::Copilot::SearchConversationsService.new(@assistant, user: @user)
tools << Captain::Tools::Copilot::GetContactService.new(@assistant, user: @user)
tools << Captain::Tools::Copilot::GetArticleService.new(@assistant, user: @user)
tools << Captain::Tools::Copilot::SearchArticlesService.new(@assistant, user: @user)
tools << Captain::Tools::Copilot::SearchContactsService.new(@assistant, user: @user)
tools << Captain::Tools::Copilot::SearchLinearIssuesService.new(@assistant, user: @user)
tools.select(&:active?)
end
def system_message
@@ -77,12 +79,16 @@ class Captain::Copilot::ChatService < Llm::BaseOpenAiService
role: 'system',
content: Captain::Llm::SystemPromptsService.copilot_response_generator(
@assistant.config['product_name'],
@tool_registry.tools_summary,
tools_summary,
@assistant.config
)
}
end
def tools_summary
@tools.map { |tool| "- #{tool.class.name}: #{tool.class.description}" }.join("\n")
end
def account_id_context
{
role: 'system',

View File

@@ -1,6 +1,4 @@
require 'openai'
class Captain::Llm::AssistantChatService < Llm::BaseOpenAiService
class Captain::Llm::AssistantChatService < Llm::BaseAiService
include Captain::ChatHelper
def initialize(assistant: nil, conversation_id: nil)
@@ -8,9 +6,10 @@ class Captain::Llm::AssistantChatService < Llm::BaseOpenAiService
@assistant = assistant
@conversation_id = conversation_id
@messages = [system_message]
@response = ''
register_tools
@tools = build_tools
end
# additional_message: A single message (String) from the user that should be appended to the chat.
@@ -28,9 +27,8 @@ class Captain::Llm::AssistantChatService < Llm::BaseOpenAiService
private
def register_tools
@tool_registry = Captain::ToolRegistryService.new(@assistant, user: nil)
@tool_registry.register_tool(Captain::Tools::SearchDocumentationService)
def build_tools
[Captain::Tools::SearchDocumentationService.new(@assistant, user: nil)]
end
def system_message

View File

@@ -1,4 +1,4 @@
class Captain::Llm::ContactAttributesService < Llm::BaseOpenAiService
class Captain::Llm::ContactAttributesService < Llm::LegacyBaseOpenAiService
def initialize(assistant, conversation)
super()
@assistant = assistant

View File

@@ -1,4 +1,4 @@
class Captain::Llm::ContactNotesService < Llm::BaseOpenAiService
class Captain::Llm::ContactNotesService < Llm::LegacyBaseOpenAiService
def initialize(assistant, conversation)
super()
@assistant = assistant

View File

@@ -1,4 +1,4 @@
class Captain::Llm::ConversationFaqService < Llm::BaseOpenAiService
class Captain::Llm::ConversationFaqService < Llm::LegacyBaseOpenAiService
DISTANCE_THRESHOLD = 0.3
def initialize(assistant, conversation)

View File

@@ -1,6 +1,6 @@
require 'openai'
class Captain::Llm::EmbeddingService < Llm::BaseOpenAiService
class Captain::Llm::EmbeddingService < Llm::LegacyBaseOpenAiService
class EmbeddingsError < StandardError; end
def self.embedding_model

View File

@@ -1,4 +1,4 @@
class Captain::Llm::FaqGeneratorService < Llm::BaseOpenAiService
class Captain::Llm::FaqGeneratorService < Llm::LegacyBaseOpenAiService
def initialize(content, language = 'english')
super()
@language = language

View File

@@ -1,4 +1,4 @@
class Captain::Llm::PaginatedFaqGeneratorService < Llm::BaseOpenAiService
class Captain::Llm::PaginatedFaqGeneratorService < Llm::LegacyBaseOpenAiService
# Default pages per chunk - easily configurable
DEFAULT_PAGES_PER_CHUNK = 10
MAX_ITERATIONS = 20 # Safety limit to prevent infinite loops

View File

@@ -1,4 +1,4 @@
class Captain::Llm::PdfProcessingService < Llm::BaseOpenAiService
class Captain::Llm::PdfProcessingService < Llm::LegacyBaseOpenAiService
def initialize(document)
super()
@document = document

View File

@@ -1,4 +1,4 @@
class Captain::Onboarding::WebsiteAnalyzerService < Llm::BaseOpenAiService
class Captain::Onboarding::WebsiteAnalyzerService < Llm::LegacyBaseOpenAiService
MAX_CONTENT_LENGTH = 8000
def initialize(website_url)

View File

@@ -0,0 +1,26 @@
# Common base for Captain RubyLLM tools. Holds the assistant/user context
# and exposes a permission check used by subclasses' active? overrides.
class Captain::Tools::BaseTool < RubyLLM::Tool
  attr_accessor :assistant

  # @param assistant [Captain::Assistant] owning assistant (provides account scope)
  # @param user [User, nil] acting user; nil for automated (non-copilot) usage
  def initialize(assistant, user: nil)
    @assistant = assistant
    @user = user
    super()
  end

  # Tools are enabled by default; subclasses override to gate on permissions
  # or integration availability.
  def active?
    true
  end

  private

  # True when the acting user holds the given permission on the assistant's
  # account. Custom roles are checked against their explicit permission list;
  # otherwise administrators and agents are both allowed.
  def user_has_permission(permission)
    return false if @user.blank?

    membership = AccountUser.find_by(account_id: @assistant.account_id, user_id: @user.id)
    return false if membership.blank?

    if membership.custom_role.present?
      membership.custom_role.permissions.include?(permission)
    else
      membership.administrator? || membership.agent?
    end
  end
end

View File

@@ -1,32 +1,11 @@
class Captain::Tools::Copilot::GetArticleService < Captain::Tools::BaseService
def name
class Captain::Tools::Copilot::GetArticleService < Captain::Tools::BaseTool
def self.name
'get_article'
end
description 'Get details of an article including its content and metadata'
param :article_id, type: :number, desc: 'The ID of the article to retrieve', required: true
def description
'Get details of an article including its content and metadata'
end
def parameters
{
type: 'object',
properties: {
article_id: {
type: 'number',
description: 'The ID of the article to retrieve'
}
},
required: %w[article_id]
}
end
def execute(arguments)
article_id = arguments['article_id']
Rails.logger.info { "#{self.class.name}: Article ID: #{article_id}" }
return 'Missing required parameters' if article_id.blank?
def execute(article_id:)
article = Article.find_by(id: article_id, account_id: @assistant.account_id)
return 'Article not found' if article.nil?

View File

@@ -1,32 +1,11 @@
class Captain::Tools::Copilot::GetContactService < Captain::Tools::BaseService
def name
class Captain::Tools::Copilot::GetContactService < Captain::Tools::BaseTool
def self.name
'get_contact'
end
description 'Get details of a contact including their profile information'
param :contact_id, type: :number, desc: 'The ID of the contact to retrieve', required: true
def description
'Get details of a contact including their profile information'
end
def parameters
{
type: 'object',
properties: {
contact_id: {
type: 'number',
description: 'The ID of the contact to retrieve'
}
},
required: %w[contact_id]
}
end
def execute(arguments)
contact_id = arguments['contact_id']
Rails.logger.info "#{self.class.name}: Contact ID: #{contact_id}"
return 'Missing required parameters' if contact_id.blank?
def execute(contact_id:)
contact = Contact.find_by(id: contact_id, account_id: @assistant.account_id)
return 'Contact not found' if contact.nil?

View File

@@ -1,32 +1,12 @@
class Captain::Tools::Copilot::GetConversationService < Captain::Tools::BaseService
def name
class Captain::Tools::Copilot::GetConversationService < Captain::Tools::BaseTool
def self.name
'get_conversation'
end
description 'Get details of a conversation including messages and contact information'
def description
'Get details of a conversation including messages and contact information'
end
def parameters
{
type: 'object',
properties: {
conversation_id: {
type: 'number',
description: 'The ID of the conversation to retrieve'
}
},
required: %w[conversation_id]
}
end
def execute(arguments)
conversation_id = arguments['conversation_id']
Rails.logger.info "#{self.class.name}: Conversation ID: #{conversation_id}"
return 'Missing required parameters' if conversation_id.blank?
param :conversation_id, type: :integer, desc: 'ID of the conversation to retrieve', required: true
def execute(conversation_id:)
conversation = Conversation.find_by(display_id: conversation_id, account_id: @assistant.account_id)
return 'Conversation not found' if conversation.blank?

View File

@@ -1,36 +1,22 @@
class Captain::Tools::Copilot::SearchArticlesService < Captain::Tools::BaseService
def name
class Captain::Tools::Copilot::SearchArticlesService < Captain::Tools::BaseTool
def self.name
'search_articles'
end
def description
'Search articles based on parameters'
description 'Search articles based on parameters'
params do
string :query, description: 'Search articles by title or content (partial match)'
number :category_id, description: 'Filter articles by category ID'
any_of :status, description: 'Filter articles by status' do
string enum: %w[draft published archived]
end
end
def parameters
{
type: 'object',
properties: properties,
required: ['query']
}
end
def execute(arguments)
query = arguments['query']
category_id = arguments['category_id']
status = arguments['status']
Rails.logger.info "#{self.class.name}: Query: #{query}, Category ID: #{category_id}, Status: #{status}"
return 'Missing required parameters' if query.blank?
articles = fetch_articles(query, category_id, status)
def execute(query: nil, category_id: nil, status: nil)
articles = fetch_articles(query: query, category_id: category_id, status: status)
return 'No articles found' unless articles.exists?
total_count = articles.count
articles = articles.limit(100)
<<~RESPONSE
#{total_count > 100 ? "Found #{total_count} articles (showing first 100)" : "Total number of articles: #{total_count}"}
#{articles.map(&:to_llm_text).join("\n---\n")}
@@ -43,29 +29,11 @@ class Captain::Tools::Copilot::SearchArticlesService < Captain::Tools::BaseServi
private
def fetch_articles(query, category_id, status)
def fetch_articles(query:, category_id:, status:)
articles = Article.where(account_id: @assistant.account_id)
articles = articles.where('title ILIKE :query OR content ILIKE :query', query: "%#{query}%") if query.present?
articles = articles.where(category_id: category_id) if category_id.present?
articles = articles.where(status: status) if status.present?
articles
end
def properties
{
query: {
type: 'string',
description: 'Search articles by title or content (partial match)'
},
category_id: {
type: 'number',
description: 'Filter articles by category ID'
},
status: {
type: 'string',
enum: %w[draft published archived],
description: 'Filter articles by status'
}
}
end
end

View File

@@ -1,27 +1,14 @@
class Captain::Tools::Copilot::SearchContactsService < Captain::Tools::BaseService
def name
class Captain::Tools::Copilot::SearchContactsService < Captain::Tools::BaseTool
def self.name
'search_contacts'
end
def description
'Search contacts based on query parameters'
end
def parameters
{
type: 'object',
properties: properties,
required: []
}
end
def execute(arguments)
email = arguments['email']
phone_number = arguments['phone_number']
name = arguments['name']
Rails.logger.info "#{self.class.name} Email: #{email}, Phone Number: #{phone_number}, Name: #{name}"
description 'Search contacts based on query parameters'
param :email, type: :string, desc: 'Filter contacts by email'
param :phone_number, type: :string, desc: 'Filter contacts by phone number'
param :name, type: :string, desc: 'Filter contacts by name (partial match)'
def execute(email: nil, phone_number: nil, name: nil)
contacts = Contact.where(account_id: @assistant.account_id)
contacts = contacts.where(email: email) if email.present?
contacts = contacts.where(phone_number: phone_number) if phone_number.present?
@@ -39,23 +26,4 @@ class Captain::Tools::Copilot::SearchContactsService < Captain::Tools::BaseServi
def active?
user_has_permission('contact_manage')
end
private
def properties
{
email: {
type: 'string',
description: 'Filter contacts by email'
},
phone_number: {
type: 'string',
description: 'Filter contacts by phone number'
},
name: {
type: 'string',
description: 'Filter contacts by name (partial match)'
}
}
end
end

View File

@@ -1,26 +1,15 @@
class Captain::Tools::Copilot::SearchConversationsService < Captain::Tools::BaseService
def name
'search_conversations'
class Captain::Tools::Copilot::SearchConversationsService < Captain::Tools::BaseTool
# Tool identifier exposed to the LLM. Plural form matches the class name
# (SearchConversationsService) and the pre-migration tool name
# ('search_conversations'); the singular 'search_conversation' was a typo
# introduced during the RubyLLM migration.
def self.name
  'search_conversations'
end
description 'Search conversations based on parameters'
def description
'Search conversations based on parameters'
end
def parameters
{
type: 'object',
properties: properties,
required: []
}
end
def execute(arguments)
status = arguments['status']
contact_id = arguments['contact_id']
priority = arguments['priority']
labels = arguments['labels']
param :status, type: :string, desc: 'Status of the conversation'
param :contact_id, type: :number, desc: 'Contact id'
param :priority, type: :string, desc: 'Priority of conversation'
param :labels, type: :string, desc: 'Labels available'
def execute(status: nil, contact_id: nil, priority: nil, labels: nil)
conversations = get_conversations(status, contact_id, priority, labels)
return 'No conversations found' unless conversations.exists?
@@ -58,13 +47,4 @@ class Captain::Tools::Copilot::SearchConversationsService < Captain::Tools::Base
@assistant.account
).perform
end
def properties
{
contact_id: { type: 'number', description: 'Filter conversations by contact ID' },
status: { type: 'string', enum: %w[open resolved pending snoozed], description: 'Filter conversations by status' },
priority: { type: 'string', enum: %w[low medium high urgent], description: 'Filter conversations by priority' },
labels: { type: 'array', items: { type: 'string' }, description: 'Filter conversations by labels' }
}
end
end

View File

@@ -1,34 +1,14 @@
class Captain::Tools::Copilot::SearchLinearIssuesService < Captain::Tools::BaseService
def name
class Captain::Tools::Copilot::SearchLinearIssuesService < Captain::Tools::BaseTool
def self.name
'search_linear_issues'
end
def description
'Search Linear issues based on a search term'
end
description 'Search Linear issues based on a search term'
param :term, type: :string, desc: 'The search term to find Linear issues', required: true
def parameters
{
type: 'object',
properties: {
term: {
type: 'string',
description: 'The search term to find Linear issues'
}
},
required: %w[term]
}
end
def execute(arguments)
def execute(term:)
return 'Linear integration is not enabled' unless active?
term = arguments['term']
Rails.logger.info "#{self.class.name}: Service called with the search term #{term}"
return 'Missing required parameters' if term.blank?
linear_service = Integrations::Linear::ProcessorService.new(account: @assistant.account)
result = linear_service.search_issue(term)

View File

@@ -1,27 +1,12 @@
class Captain::Tools::SearchDocumentationService < Captain::Tools::BaseService
def name
class Captain::Tools::SearchDocumentationService < Captain::Tools::BaseTool
def self.name
'search_documentation'
end
description 'Search and retrieve documentation from knowledge base'
def description
'Search and retrieve documentation from knowledge base'
end
param :query, desc: 'Search Query', required: true
def parameters
{
type: 'object',
properties: {
search_query: {
type: 'string',
description: 'The search query to look up in the documentation.'
}
},
required: ['search_query']
}
end
def execute(arguments)
query = arguments['search_query']
def execute(query:)
Rails.logger.info { "#{self.class.name}: #{query}" }
responses = assistant.responses.approved.search(query)

View File

@@ -1,4 +1,4 @@
class Internal::AccountAnalysis::ContentEvaluatorService < Llm::BaseOpenAiService
class Internal::AccountAnalysis::ContentEvaluatorService < Llm::LegacyBaseOpenAiService
def initialize
super()

View File

@@ -0,0 +1,33 @@
# frozen_string_literal: true

# Base service for LLM operations using RubyLLM.
# New features should inherit from this class.
class Llm::BaseAiService
  DEFAULT_MODEL = Llm::Config::DEFAULT_MODEL
  DEFAULT_TEMPERATURE = 1.0

  attr_reader :model, :temperature

  # Ensures RubyLLM is configured before resolving model and temperature.
  def initialize
    Llm::Config.initialize!
    setup_model
    setup_temperature
  end

  # Returns a configured RubyLLM chat instance.
  # Subclasses can override model/temperature via instance variables or pass
  # them explicitly per call.
  def chat(model: @model, temperature: @temperature)
    RubyLLM.chat(model: model).with_temperature(temperature)
  end

  private

  # Installation-level override wins; falls back to DEFAULT_MODEL when the
  # config row is missing or blank.
  def setup_model
    configured = InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_MODEL')&.value
    @model = configured.presence || DEFAULT_MODEL
  end

  def setup_temperature
    @temperature = DEFAULT_TEMPERATURE
  end
end

View File

@@ -1,5 +1,11 @@
class Llm::BaseOpenAiService
DEFAULT_MODEL = 'gpt-4o-mini'.freeze
# frozen_string_literal: true
# DEPRECATED: This class uses the legacy OpenAI Ruby gem directly.
# New features should use Llm::BaseAiService with RubyLLM instead.
# This class will be removed once all services are migrated to RubyLLM.
class Llm::LegacyBaseOpenAiService
DEFAULT_MODEL = 'gpt-4o-mini'
attr_reader :client, :model
def initialize

View File

@@ -1,4 +1,4 @@
class Messages::AudioTranscriptionService < Llm::BaseOpenAiService
class Messages::AudioTranscriptionService < Llm::LegacyBaseOpenAiService
attr_reader :attachment, :message, :account
def initialize(attachment)

View File

@@ -1,12 +1,11 @@
# frozen_string_literal: true
require 'opentelemetry_config'
require_relative 'llm_instrumentation_constants'
require_relative 'llm_instrumentation_helpers'
module Integrations::LlmInstrumentation
include Integrations::LlmInstrumentationConstants
include Integrations::LlmInstrumentationHelpers
include Integrations::LlmInstrumentationSpans
PROVIDER_PREFIXES = {
'openai' => %w[gpt- o1 o3 o4 text-embedding- whisper- tts-],
@@ -16,10 +15,6 @@ module Integrations::LlmInstrumentation
'deepseek' => %w[deepseek-]
}.freeze
def tracer
@tracer ||= OpentelemetryConfig.tracer
end
def instrument_llm_call(params)
return yield unless ChatwootApp.otel_enabled?
@@ -43,6 +38,7 @@ module Integrations::LlmInstrumentation
result = nil
executed = false
tracer.in_span(params[:span_name]) do |span|
set_request_attributes(span, params)
set_metadata_attributes(span, params)
# By default, the input and output of a trace are set from the root observation
@@ -98,7 +94,12 @@ module Integrations::LlmInstrumentation
end
def record_completion(span, result)
set_completion_attributes(span, result) if result.is_a?(Hash)
if result.respond_to?(:content)
span.set_attribute(ATTR_GEN_AI_COMPLETION_ROLE, result.role.to_s) if result.respond_to?(:role)
span.set_attribute(ATTR_GEN_AI_COMPLETION_CONTENT, result.content.to_s)
elsif result.is_a?(Hash)
set_completion_attributes(span, result) if result.is_a?(Hash)
end
end
def set_request_attributes(span, params)
@@ -117,17 +118,4 @@ module Integrations::LlmInstrumentation
span.set_attribute(format(ATTR_GEN_AI_PROMPT_CONTENT, idx), content.to_s)
end
end
def set_metadata_attributes(span, params)
session_id = params[:conversation_id].present? ? "#{params[:account_id]}_#{params[:conversation_id]}" : nil
span.set_attribute(ATTR_LANGFUSE_USER_ID, params[:account_id].to_s) if params[:account_id]
span.set_attribute(ATTR_LANGFUSE_SESSION_ID, session_id) if session_id.present?
span.set_attribute(ATTR_LANGFUSE_TAGS, [params[:feature_name]].to_json)
return unless params[:metadata].is_a?(Hash)
params[:metadata].each do |key, value|
span.set_attribute(format(ATTR_LANGFUSE_METADATA, key), value.to_s)
end
end
end

View File

@@ -37,4 +37,17 @@ module Integrations::LlmInstrumentationHelpers
span.set_attribute(ATTR_GEN_AI_RESPONSE_ERROR_CODE, error_code) if error_code
span.status = OpenTelemetry::Trace::Status.error("API Error: #{error_code}")
end
# Annotates a span with Langfuse metadata: user (account), session
# (account_conversation composite), feature tag, and any extra key/value
# metadata supplied by the caller.
def set_metadata_attributes(span, params)
# Session groups all spans of one conversation; absent without a conversation_id.
session_id = params[:conversation_id].present? ? "#{params[:account_id]}_#{params[:conversation_id]}" : nil
span.set_attribute(ATTR_LANGFUSE_USER_ID, params[:account_id].to_s) if params[:account_id]
span.set_attribute(ATTR_LANGFUSE_SESSION_ID, session_id) if session_id.present?
span.set_attribute(ATTR_LANGFUSE_TAGS, [params[:feature_name]].to_json)
# Extra metadata is optional and must be a Hash; each entry becomes its own attribute.
return unless params[:metadata].is_a?(Hash)
params[:metadata].each do |key, value|
span.set_attribute(format(ATTR_LANGFUSE_METADATA, key), value.to_s)
end
end
end

View File

@@ -0,0 +1,92 @@
# frozen_string_literal: true

require 'opentelemetry_config'
require_relative 'llm_instrumentation_constants'

# Span helpers for the RubyLLM agentic loop: one span per LLM turn and one
# per tool invocation. Spans live on per-instance LIFO stacks so nested
# start/end callbacks pair up. All failures are swallowed with a warning so
# instrumentation can never break the chat flow.
#
# Fix over the previous version: spans are pushed onto their stack before
# being annotated, and finishing is guarded with `ensure`, so an exception
# while setting attributes can no longer leak an unfinished span.
module Integrations::LlmInstrumentationSpans
  include Integrations::LlmInstrumentationConstants

  def tracer
    @tracer ||= OpentelemetryConfig.tracer
  end

  # Opens a span for a single LLM turn. The span is pushed before it is
  # annotated so a failure while setting attributes cannot orphan an
  # already-started span (end_llm_turn_span will still finish it).
  def start_llm_turn_span(params)
    return unless ChatwootApp.otel_enabled?

    span = tracer.start_span(params[:span_name])
    @pending_llm_turn_spans ||= []
    @pending_llm_turn_spans.push(span)
    set_llm_turn_request_attributes(span, params)
    set_llm_turn_prompt_attributes(span, params[:messages]) if params[:messages]
  rescue StandardError => e
    Rails.logger.warn "Failed to start LLM turn span: #{e.message}"
  end

  # Closes the most recently opened LLM turn span. `finish` runs in an
  # ensure block so recording response attributes can never leak the span.
  def end_llm_turn_span(message)
    return unless ChatwootApp.otel_enabled?

    span = @pending_llm_turn_spans&.pop
    return unless span

    begin
      set_llm_turn_response_attributes(span, message) if message
    ensure
      span.finish
    end
  rescue StandardError => e
    Rails.logger.warn "Failed to end LLM turn span: #{e.message}"
  end

  # Opens a span for a tool invocation; pushed before the input attribute is
  # set, for the same leak-safety reason as LLM turn spans.
  def start_tool_span(tool_call)
    return unless ChatwootApp.otel_enabled?

    tool_name = tool_call.name.to_s
    span = tracer.start_span(format(TOOL_SPAN_NAME, tool_name))
    @pending_tool_spans ||= []
    @pending_tool_spans.push(span)
    span.set_attribute(ATTR_LANGFUSE_OBSERVATION_INPUT, tool_call.arguments.to_json)
  rescue StandardError => e
    Rails.logger.warn "Failed to start tool span: #{e.message}"
  end

  # Closes the most recently opened tool span, recording its output.
  # `finish` is ensure-guarded so serialization errors cannot leak the span.
  def end_tool_span(result)
    return unless ChatwootApp.otel_enabled?

    span = @pending_tool_spans&.pop
    return unless span

    begin
      output = result.is_a?(String) ? result : result.to_json
      span.set_attribute(ATTR_LANGFUSE_OBSERVATION_OUTPUT, output)
    ensure
      span.finish
    end
  rescue StandardError => e
    Rails.logger.warn "Failed to end tool span: #{e.message}"
  end

  private

  def set_llm_turn_request_attributes(span, params)
    # determine_provider is supplied by Integrations::LlmInstrumentation,
    # which is included alongside this module.
    provider = determine_provider(params[:model])
    span.set_attribute(ATTR_GEN_AI_PROVIDER, provider)
    span.set_attribute(ATTR_GEN_AI_REQUEST_MODEL, params[:model]) if params[:model]
    span.set_attribute(ATTR_GEN_AI_REQUEST_TEMPERATURE, params[:temperature]) if params[:temperature]
  end

  def set_llm_turn_prompt_attributes(span, messages)
    messages.each_with_index do |msg, idx|
      # NOTE(review): assumes msg[:role]/msg[:content] are already strings
      # (ChatHelper builds them with to_s) — OTel attributes reject symbols.
      span.set_attribute(format(ATTR_GEN_AI_PROMPT_ROLE, idx), msg[:role])
      span.set_attribute(format(ATTR_GEN_AI_PROMPT_CONTENT, idx), msg[:content])
    end
    span.set_attribute(ATTR_LANGFUSE_OBSERVATION_INPUT, messages.to_json)
  end

  def set_llm_turn_response_attributes(span, message)
    span.set_attribute(ATTR_GEN_AI_COMPLETION_ROLE, message.role.to_s) if message.respond_to?(:role)
    span.set_attribute(ATTR_GEN_AI_COMPLETION_CONTENT, message.content.to_s) if message.respond_to?(:content)
    set_llm_turn_usage_attributes(span, message)
    span.set_attribute(ATTR_LANGFUSE_OBSERVATION_OUTPUT, message.content.to_s) if message.respond_to?(:content)
  end

  def set_llm_turn_usage_attributes(span, message)
    span.set_attribute(ATTR_GEN_AI_USAGE_INPUT_TOKENS, message.input_tokens) if message.respond_to?(:input_tokens) && message.input_tokens
    span.set_attribute(ATTR_GEN_AI_USAGE_OUTPUT_TOKENS, message.output_tokens) if message.respond_to?(:output_tokens) && message.output_tokens
  end
end

View File

@@ -33,6 +33,7 @@ module Llm::Config
RubyLLM.configure do |config|
config.openai_api_key = system_api_key if system_api_key.present?
config.openai_api_base = openai_endpoint.chomp('/') if openai_endpoint.present?
config.logger = Rails.logger
end
end

View File

@@ -7,7 +7,6 @@ RSpec.describe Captain::Copilot::ChatService do
let(:assistant) { create(:captain_assistant, account: account) }
let(:contact) { create(:contact, account: account) }
let(:conversation) { create(:conversation, account: account, inbox: inbox, contact: contact) }
let(:mock_openai_client) { instance_double(OpenAI::Client) }
let(:copilot_thread) { create(:captain_copilot_thread, account: account, user: user) }
let!(:copilot_message) do
create(
@@ -20,13 +19,29 @@ RSpec.describe Captain::Copilot::ChatService do
{ user_id: user.id, copilot_thread_id: copilot_thread.id, conversation_id: conversation.display_id }
end
# RubyLLM mocks
let(:mock_chat) { instance_double(RubyLLM::Chat) }
let(:mock_response) do
instance_double(RubyLLM::Message, content: '{ "content": "Hey", "reasoning": "Test reasoning", "reply_suggestion": false }')
end
before do
create(:installation_config, name: 'CAPTAIN_OPEN_AI_API_KEY', value: 'test-key')
create(:installation_config, name: 'CAPTAIN_OPEN_AI_ENDPOINT', value: 'https://api.openai.com/')
allow(OpenAI::Client).to receive(:new).and_return(mock_openai_client)
allow(mock_openai_client).to receive(:chat).and_return({
choices: [{ message: { content: '{ "content": "Hey" }' } }]
}.with_indifferent_access)
InstallationConfig.find_or_create_by(name: 'CAPTAIN_OPEN_AI_API_KEY') do |c|
c.value = 'test-key'
end
allow(RubyLLM).to receive(:chat).and_return(mock_chat)
allow(mock_chat).to receive(:with_temperature).and_return(mock_chat)
allow(mock_chat).to receive(:with_params).and_return(mock_chat)
allow(mock_chat).to receive(:with_tool).and_return(mock_chat)
allow(mock_chat).to receive(:with_instructions).and_return(mock_chat)
allow(mock_chat).to receive(:add_message).and_return(mock_chat)
allow(mock_chat).to receive(:on_new_message).and_return(mock_chat)
allow(mock_chat).to receive(:on_end_message).and_return(mock_chat)
allow(mock_chat).to receive(:on_tool_call).and_return(mock_chat)
allow(mock_chat).to receive(:on_tool_result).and_return(mock_chat)
allow(mock_chat).to receive(:messages).and_return([])
allow(mock_chat).to receive(:ask).and_return(mock_response)
end
describe '#initialize' do
@@ -48,48 +63,6 @@ RSpec.describe Captain::Copilot::ChatService do
expect(messages.second[:role]).to eq('system')
expect(messages.second[:content]).to include(account.id.to_s)
end
it 'initializes OpenAI client with configured endpoint' do
expect(OpenAI::Client).to receive(:new).with(
access_token: 'test-key',
uri_base: 'https://api.openai.com/',
log_errors: Rails.env.development?
)
described_class.new(assistant, config)
end
context 'when CAPTAIN_OPEN_AI_ENDPOINT is not configured' do
before do
InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_ENDPOINT')&.destroy
end
it 'uses default OpenAI endpoint' do
expect(OpenAI::Client).to receive(:new).with(
access_token: 'test-key',
uri_base: 'https://api.openai.com/',
log_errors: Rails.env.development?
)
described_class.new(assistant, config)
end
end
context 'when custom endpoint is configured' do
before do
InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_ENDPOINT').update!(value: 'https://custom.azure.com/')
end
it 'uses custom endpoint for OpenAI client' do
expect(OpenAI::Client).to receive(:new).with(
access_token: 'test-key',
uri_base: 'https://custom.azure.com/',
log_errors: Rails.env.development?
)
described_class.new(assistant, config)
end
end
end
describe '#generate_response' do
@@ -112,82 +85,19 @@ RSpec.describe Captain::Copilot::ChatService do
end
it 'returns the response from request_chat_completion' do
expect(service.generate_response('Hello')).to eq({ 'content' => 'Hey' })
result = service.generate_response('Hello')
expect(result).to eq({ 'content' => 'Hey', 'reasoning' => 'Test reasoning', 'reply_suggestion' => false })
end
context 'when response contains tool calls' do
before do
allow(mock_openai_client).to receive(:chat).and_return(
{
choices: [{ message: { 'tool_calls' => tool_calls } }]
}.with_indifferent_access,
{
choices: [{ message: { content: '{ "content": "Tool response processed" }' } }]
}.with_indifferent_access
)
end
context 'when tool call is valid' do
let(:tool_calls) do
[{
'id' => 'call_123',
'function' => {
'name' => 'get_conversation',
'arguments' => "{ \"conversation_id\": #{conversation.display_id} }"
}
}]
end
it 'processes tool calls and appends them to messages' do
result = service.generate_response("Find conversation #{conversation.id}")
expect(result).to eq({ 'content' => 'Tool response processed' })
expect(service.messages).to include(
{ role: 'assistant', tool_calls: tool_calls }
)
expect(service.messages).to include(
{
role: 'tool', tool_call_id: 'call_123', content: conversation.to_llm_text
}
)
expect(result).to eq({ 'content' => 'Tool response processed' })
end
end
context 'when tool call is invalid' do
let(:tool_calls) do
[{
'id' => 'call_123',
'function' => {
'name' => 'get_settings',
'arguments' => '{}'
}
}]
end
it 'handles invalid tool calls' do
result = service.generate_response('Find settings')
expect(result).to eq({ 'content' => 'Tool response processed' })
expect(service.messages).to include(
{
role: 'assistant', tool_calls: tool_calls
}
)
expect(service.messages).to include(
{
role: 'tool',
tool_call_id: 'call_123',
content: 'Tool not available'
}
)
end
end
it 'increments response usage for the account' do
expect do
service.generate_response('Hello')
end.to(change { account.reload.custom_attributes['captain_responses_usage'].to_i }.by(1))
end
end
describe '#setup_user' do
describe 'user setup behavior' do
it 'sets user when user_id is present in config' do
service = described_class.new(assistant, { user_id: user.id })
expect(service.user).to eq(user)
@@ -199,7 +109,7 @@ RSpec.describe Captain::Copilot::ChatService do
end
end
describe '#setup_message_history' do
describe 'message history behavior' do
context 'when copilot_thread_id is present' do
it 'finds the copilot thread and sets previous history from it' do
service = described_class.new(assistant, { copilot_thread_id: copilot_thread.id })
@@ -227,7 +137,7 @@ RSpec.describe Captain::Copilot::ChatService do
end
end
describe '#build_messages' do
describe 'message building behavior' do
it 'includes system message and account context' do
service = described_class.new(assistant, {})
messages = service.messages
@@ -257,13 +167,9 @@ RSpec.describe Captain::Copilot::ChatService do
end
end
describe '#persist_message' do
describe 'message persistence behavior' do
context 'when copilot_thread is present' do
it 'creates a copilot message' do
allow(mock_openai_client).to receive(:chat).and_return({
choices: [{ message: { content: '{ "content": "Hey" }' } }]
}.with_indifferent_access)
it 'creates a copilot message with the response' do
expect do
described_class.new(assistant, { copilot_thread_id: copilot_thread.id }).generate_response('Hello')
end.to change(CopilotMessage, :count).by(1)
@@ -276,10 +182,6 @@ RSpec.describe Captain::Copilot::ChatService do
context 'when copilot_thread is not present' do
it 'does not create a copilot message' do
allow(mock_openai_client).to receive(:chat).and_return({
choices: [{ message: { content: '{ "content": "Hey" }' } }]
}.with_indifferent_access)
expect do
described_class.new(assistant, {}).generate_response('Hello')
end.not_to(change(CopilotMessage, :count))

View File

@@ -19,19 +19,8 @@ RSpec.describe Captain::Tools::Copilot::GetArticleService do
end
describe '#parameters' do
it 'returns the expected parameter schema' do
expect(service.parameters).to eq(
{
type: 'object',
properties: {
article_id: {
type: 'number',
description: 'The ID of the article to retrieve'
}
},
required: %w[article_id]
}
)
it 'defines article_id parameter' do
expect(service.parameters.keys).to contain_exactly(:article_id)
end
end
@@ -79,13 +68,13 @@ RSpec.describe Captain::Tools::Copilot::GetArticleService do
describe '#execute' do
context 'when article_id is blank' do
it 'returns error message' do
expect(service.execute({})).to eq('Missing required parameters')
expect(service.execute(article_id: nil)).to eq('Article not found')
end
end
context 'when article is not found' do
it 'returns not found message' do
expect(service.execute({ 'article_id' => 999 })).to eq('Article not found')
expect(service.execute(article_id: 999)).to eq('Article not found')
end
end
@@ -94,7 +83,7 @@ RSpec.describe Captain::Tools::Copilot::GetArticleService do
let(:article) { create(:article, account: account, portal: portal, author: user, title: 'Test Article', content: 'Content') }
it 'returns the article in llm text format' do
result = service.execute({ 'article_id' => article.id })
result = service.execute(article_id: article.id)
expect(result).to eq(article.to_llm_text)
end
@@ -104,7 +93,7 @@ RSpec.describe Captain::Tools::Copilot::GetArticleService do
let(:other_article) { create(:article, account: other_account, portal: other_portal, author: user, title: 'Other Article') }
it 'returns not found message' do
expect(service.execute({ 'article_id' => other_article.id })).to eq('Article not found')
expect(service.execute(article_id: other_article.id)).to eq('Article not found')
end
end
end

View File

@@ -19,19 +19,8 @@ RSpec.describe Captain::Tools::Copilot::GetContactService do
end
describe '#parameters' do
it 'returns the expected parameter schema' do
expect(service.parameters).to eq(
{
type: 'object',
properties: {
contact_id: {
type: 'number',
description: 'The ID of the contact to retrieve'
}
},
required: %w[contact_id]
}
)
it 'defines contact_id parameter' do
expect(service.parameters.keys).to contain_exactly(:contact_id)
end
end
@@ -78,14 +67,14 @@ RSpec.describe Captain::Tools::Copilot::GetContactService do
describe '#execute' do
context 'when contact_id is blank' do
it 'returns error message' do
expect(service.execute({})).to eq('Missing required parameters')
it 'returns not found message' do
expect(service.execute(contact_id: nil)).to eq('Contact not found')
end
end
context 'when contact is not found' do
it 'returns not found message' do
expect(service.execute({ 'contact_id' => 999 })).to eq('Contact not found')
expect(service.execute(contact_id: 999)).to eq('Contact not found')
end
end
@@ -93,7 +82,7 @@ RSpec.describe Captain::Tools::Copilot::GetContactService do
let(:contact) { create(:contact, account: account) }
it 'returns the contact in llm text format' do
result = service.execute({ 'contact_id' => contact.id })
result = service.execute(contact_id: contact.id)
expect(result).to eq(contact.to_llm_text)
end
@@ -102,7 +91,7 @@ RSpec.describe Captain::Tools::Copilot::GetContactService do
let(:other_contact) { create(:contact, account: other_account) }
it 'returns not found message' do
expect(service.execute({ 'contact_id' => other_contact.id })).to eq('Contact not found')
expect(service.execute(contact_id: other_contact.id)).to eq('Contact not found')
end
end
end

View File

@@ -19,19 +19,8 @@ RSpec.describe Captain::Tools::Copilot::GetConversationService do
end
describe '#parameters' do
it 'returns the expected parameter schema' do
expect(service.parameters).to eq(
{
type: 'object',
properties: {
conversation_id: {
type: 'number',
description: 'The ID of the conversation to retrieve'
}
},
required: %w[conversation_id]
}
)
it 'defines conversation_id parameter' do
expect(service.parameters.keys).to contain_exactly(:conversation_id)
end
end
@@ -107,15 +96,9 @@ RSpec.describe Captain::Tools::Copilot::GetConversationService do
end
describe '#execute' do
context 'when conversation_id is blank' do
it 'returns error message' do
expect(service.execute({})).to eq('Missing required parameters')
end
end
context 'when conversation is not found' do
it 'returns not found message' do
expect(service.execute({ 'conversation_id' => 999 })).to eq('Conversation not found')
expect(service.execute(conversation_id: 999)).to eq('Conversation not found')
end
end
@@ -124,7 +107,7 @@ RSpec.describe Captain::Tools::Copilot::GetConversationService do
let(:conversation) { create(:conversation, account: account, inbox: inbox) }
it 'returns the conversation in llm text format' do
result = service.execute({ 'conversation_id' => conversation.display_id })
result = service.execute(conversation_id: conversation.display_id)
expect(result).to eq(conversation.to_llm_text)
end
@@ -143,7 +126,7 @@ RSpec.describe Captain::Tools::Copilot::GetConversationService do
content: 'Private note content',
private: true)
result = service.execute({ 'conversation_id' => conversation.display_id })
result = service.execute(conversation_id: conversation.display_id)
# Verify that the result includes both regular and private messages
expect(result).to include('Regular message')
@@ -157,7 +140,7 @@ RSpec.describe Captain::Tools::Copilot::GetConversationService do
let(:other_conversation) { create(:conversation, account: other_account, inbox: other_inbox) }
it 'returns not found message' do
expect(service.execute({ 'conversation_id' => other_conversation.display_id })).to eq('Conversation not found')
expect(service.execute(conversation_id: other_conversation.display_id)).to eq('Conversation not found')
end
end
end

View File

@@ -18,32 +18,6 @@ RSpec.describe Captain::Tools::Copilot::SearchArticlesService do
end
end
describe '#parameters' do
it 'returns the expected parameter schema' do
expect(service.parameters).to eq(
{
type: 'object',
properties: {
query: {
type: 'string',
description: 'Search articles by title or content (partial match)'
},
category_id: {
type: 'number',
description: 'Filter articles by category ID'
},
status: {
type: 'string',
enum: %w[draft published archived],
description: 'Filter articles by status'
}
},
required: ['query']
}
)
end
end
describe '#active?' do
context 'when user is an admin' do
let(:user) { create(:user, :administrator, account: account) }
@@ -95,15 +69,9 @@ RSpec.describe Captain::Tools::Copilot::SearchArticlesService do
end
describe '#execute' do
context 'when query is blank' do
it 'returns error message' do
expect(service.execute({})).to eq('Missing required parameters')
end
end
context 'when no articles are found' do
it 'returns no articles found message' do
expect(service.execute({ 'query' => 'test' })).to eq('No articles found')
expect(service.execute(query: 'test', category_id: nil, status: nil)).to eq('No articles found')
end
end
@@ -113,7 +81,7 @@ RSpec.describe Captain::Tools::Copilot::SearchArticlesService do
let!(:article2) { create(:article, account: account, portal: portal, author: user, title: 'Test Article 2', content: 'Content 2') }
it 'returns formatted articles with count' do
result = service.execute({ 'query' => 'Test' })
result = service.execute(query: 'Test', category_id: nil, status: nil)
expect(result).to include('Total number of articles: 2')
expect(result).to include(article1.to_llm_text)
expect(result).to include(article2.to_llm_text)
@@ -124,7 +92,7 @@ RSpec.describe Captain::Tools::Copilot::SearchArticlesService do
let!(:article3) { create(:article, account: account, portal: portal, author: user, category: category, title: 'Test Article 3') }
it 'returns only articles from the specified category' do
result = service.execute({ 'query' => 'Test', 'category_id' => category.id })
result = service.execute(query: 'Test', category_id: category.id, status: nil)
expect(result).to include('Total number of articles: 1')
expect(result).to include(article3.to_llm_text)
expect(result).not_to include(article1.to_llm_text)
@@ -137,7 +105,7 @@ RSpec.describe Captain::Tools::Copilot::SearchArticlesService do
let!(:article4) { create(:article, account: account, portal: portal, author: user, title: 'Test Article 4', status: 'draft') }
it 'returns only articles with the specified status' do
result = service.execute({ 'query' => 'Test', 'status' => 'published' })
result = service.execute(query: 'Test', category_id: nil, status: 'published')
expect(result).to include(article3.to_llm_text)
expect(result).not_to include(article4.to_llm_text)
end

View File

@@ -19,27 +19,8 @@ RSpec.describe Captain::Tools::Copilot::SearchContactsService do
end
describe '#parameters' do
it 'returns the expected parameter schema' do
expect(service.parameters).to eq(
{
type: 'object',
properties: {
email: {
type: 'string',
description: 'Filter contacts by email'
},
phone_number: {
type: 'string',
description: 'Filter contacts by phone number'
},
name: {
type: 'string',
description: 'Filter contacts by name (partial match)'
}
},
required: []
}
)
it 'defines email, phone_number, and name parameters' do
expect(service.parameters.keys).to contain_exactly(:email, :phone_number, :name)
end
end
@@ -86,25 +67,25 @@ RSpec.describe Captain::Tools::Copilot::SearchContactsService do
end
it 'returns contacts when filtered by email' do
result = service.execute({ 'email' => 'test1@example.com' })
result = service.execute(email: 'test1@example.com')
expect(result).to include(contact1.to_llm_text)
expect(result).not_to include(contact2.to_llm_text)
end
it 'returns contacts when filtered by phone number' do
result = service.execute({ 'phone_number' => '+1234567890' })
result = service.execute(phone_number: '+1234567890')
expect(result).to include(contact1.to_llm_text)
expect(result).not_to include(contact2.to_llm_text)
end
it 'returns contacts when filtered by name' do
result = service.execute({ 'name' => 'Contact 1' })
result = service.execute(name: 'Contact 1')
expect(result).to include(contact1.to_llm_text)
expect(result).not_to include(contact2.to_llm_text)
end
it 'returns all matching contacts when no filters are provided' do
result = service.execute({})
result = service.execute
expect(result).to include(contact1.to_llm_text)
expect(result).to include(contact2.to_llm_text)
end

View File

@@ -8,7 +8,7 @@ RSpec.describe Captain::Tools::Copilot::SearchConversationsService do
describe '#name' do
it 'returns the correct service name' do
expect(service.name).to eq('search_conversations')
expect(service.name).to eq('search_conversation')
end
end
@@ -19,10 +19,8 @@ RSpec.describe Captain::Tools::Copilot::SearchConversationsService do
end
describe '#parameters' do
it 'returns the correct parameter schema' do
params = service.parameters
expect(params[:type]).to eq('object')
expect(params[:properties]).to include(:contact_id, :status, :priority)
it 'defines the expected parameters' do
expect(service.parameters.keys).to contain_exactly(:status, :contact_id, :priority, :labels)
end
end
@@ -90,35 +88,35 @@ RSpec.describe Captain::Tools::Copilot::SearchConversationsService do
let!(:resolved_conversation) { create(:conversation, account: account, status: 'resolved', priority: 'low') }
it 'returns all conversations when no filters are applied' do
result = service.execute({})
result = service.execute
expect(result).to include('Total number of conversations: 2')
expect(result).to include(open_conversation.to_llm_text(include_contact_details: true))
expect(result).to include(resolved_conversation.to_llm_text(include_contact_details: true))
end
it 'filters conversations by status' do
result = service.execute({ 'status' => 'open' })
result = service.execute(status: 'open')
expect(result).to include('Total number of conversations: 1')
expect(result).to include(open_conversation.to_llm_text(include_contact_details: true))
expect(result).not_to include(resolved_conversation.to_llm_text(include_contact_details: true))
end
it 'filters conversations by contact_id' do
result = service.execute({ 'contact_id' => contact.id })
result = service.execute(contact_id: contact.id)
expect(result).to include('Total number of conversations: 1')
expect(result).to include(open_conversation.to_llm_text(include_contact_details: true))
expect(result).not_to include(resolved_conversation.to_llm_text(include_contact_details: true))
end
it 'filters conversations by priority' do
result = service.execute({ 'priority' => 'high' })
result = service.execute(priority: 'high')
expect(result).to include('Total number of conversations: 1')
expect(result).to include(open_conversation.to_llm_text(include_contact_details: true))
expect(result).not_to include(resolved_conversation.to_llm_text(include_contact_details: true))
end
it 'returns appropriate message when no conversations are found' do
result = service.execute({ 'status' => 'snoozed' })
result = service.execute(status: 'snoozed')
expect(result).to eq('No conversations found')
end
end

View File

@@ -19,19 +19,8 @@ RSpec.describe Captain::Tools::Copilot::SearchLinearIssuesService do
end
describe '#parameters' do
it 'returns the expected parameter schema' do
expect(service.parameters).to eq(
{
type: 'object',
properties: {
term: {
type: 'string',
description: 'The search term to find Linear issues'
}
},
required: %w[term]
}
)
it 'defines term parameter' do
expect(service.parameters.keys).to contain_exactly(:term)
end
end
@@ -76,7 +65,7 @@ RSpec.describe Captain::Tools::Copilot::SearchLinearIssuesService do
describe '#execute' do
context 'when Linear integration is not enabled' do
it 'returns error message' do
expect(service.execute({ 'term' => 'test' })).to eq('Linear integration is not enabled')
expect(service.execute(term: 'test')).to eq('Linear integration is not enabled')
end
end
@@ -89,8 +78,12 @@ RSpec.describe Captain::Tools::Copilot::SearchLinearIssuesService do
end
context 'when term is blank' do
it 'returns error message' do
expect(service.execute({ 'term' => '' })).to eq('Missing required parameters')
before do
allow(linear_service).to receive(:search_issue).with('').and_return({ data: [] })
end
it 'returns no issues found message' do
expect(service.execute(term: '')).to eq('No issues found, I should try another similar search term')
end
end
@@ -100,7 +93,7 @@ RSpec.describe Captain::Tools::Copilot::SearchLinearIssuesService do
end
it 'returns the error message' do
expect(service.execute({ 'term' => 'test' })).to eq('API Error')
expect(service.execute(term: 'test')).to eq('API Error')
end
end
@@ -110,7 +103,7 @@ RSpec.describe Captain::Tools::Copilot::SearchLinearIssuesService do
end
it 'returns no issues found message' do
expect(service.execute({ 'term' => 'test' })).to eq('No issues found, I should try another similar search term')
expect(service.execute(term: 'test')).to eq('No issues found, I should try another similar search term')
end
end
@@ -131,7 +124,7 @@ RSpec.describe Captain::Tools::Copilot::SearchLinearIssuesService do
end
it 'returns formatted issues' do
result = service.execute({ 'term' => 'test' })
result = service.execute(term: 'test')
expect(result).to include('Total number of issues: 1')
expect(result).to include('Title: Test Issue')
expect(result).to include('ID: TEST-123')

View File

@@ -20,19 +20,8 @@ RSpec.describe Captain::Tools::SearchDocumentationService do
end
describe '#parameters' do
it 'returns the required parameters schema' do
expected_schema = {
type: 'object',
properties: {
search_query: {
type: 'string',
description: 'The search query to look up in the documentation.'
}
},
required: ['search_query']
}
expect(service.parameters).to eq(expected_schema)
it 'defines query parameter' do
expect(service.parameters.keys).to contain_exactly(:query)
end
end
@@ -56,7 +45,7 @@ RSpec.describe Captain::Tools::SearchDocumentationService do
end
it 'returns formatted responses for the search query' do
result = service.execute({ 'search_query' => question })
result = service.execute(query: question)
expect(result).to include(question)
expect(result).to include(answer)
@@ -70,7 +59,7 @@ RSpec.describe Captain::Tools::SearchDocumentationService do
end
it 'returns an empty string' do
expect(service.execute({ 'search_query' => question })).to eq('No FAQs found for the given query')
expect(service.execute(query: question)).to eq('No FAQs found for the given query')
end
end
end

View File

@@ -256,52 +256,5 @@ RSpec.describe Integrations::LlmInstrumentation do
end
end
end
describe '#instrument_tool_call' do
let(:tool_name) { 'search_documents' }
let(:arguments) { { query: 'test query' } }
context 'when OTEL provider is not configured' do
before { otel_config.update(value: '') }
it 'executes the block without tracing' do
result = instance.instrument_tool_call(tool_name, arguments) { 'tool_result' }
expect(result).to eq('tool_result')
end
end
context 'when OTEL provider is configured' do
let(:mock_span) { instance_double(OpenTelemetry::Trace::Span) }
let(:mock_tracer) { instance_double(OpenTelemetry::Trace::Tracer) }
before do
allow(mock_span).to receive(:set_attribute)
allow(instance).to receive(:tracer).and_return(mock_tracer)
allow(mock_tracer).to receive(:in_span).and_yield(mock_span)
end
it 'executes the block and returns the result' do
result = instance.instrument_tool_call(tool_name, arguments) { 'tool_result' }
expect(result).to eq('tool_result')
end
it 'propagates instrumentation errors' do
allow(mock_tracer).to receive(:in_span).and_raise(StandardError.new('Instrumentation failed'))
expect do
instance.instrument_tool_call(tool_name, arguments) { 'tool_result' }
end.to raise_error(StandardError, 'Instrumentation failed')
end
it 'creates a span with tool name and sets observation attributes' do
tool_result = { documents: ['doc1'] }
instance.instrument_tool_call(tool_name, arguments) { tool_result }
expect(mock_tracer).to have_received(:in_span).with('tool.search_documents')
expect(mock_span).to have_received(:set_attribute).with('langfuse.observation.input', arguments.to_json)
expect(mock_span).to have_received(:set_attribute).with('langfuse.observation.output', tool_result.to_json)
end
end
end
end
end