Files
leadchat/enterprise/app/services/captain/llm/contact_attributes_service.rb
Aakash Bakhle fd69b4c8f2 fix: captain json parsing (#13708)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Shivam Mishra <scm.mymail@gmail.com>
2026-03-05 15:43:21 +05:30

62 lines
1.7 KiB
Ruby

class Captain::Llm::ContactAttributesService < Llm::BaseAiService
  include Integrations::LlmInstrumentation

  # Builds a service that asks the assistant's LLM to derive contact
  # attributes from the contact profile plus the conversation transcript.
  #
  # @param assistant [Object] assistant whose LLM configuration is used
  # @param conversation [Object] conversation supplying contact and transcript
  def initialize(assistant, conversation)
    super()
    @assistant = assistant
    @conversation = conversation
    @contact = conversation.contact
    @content = "#Contact\n\n#{@contact.to_llm_text} \n\n#Conversation\n\n#{@conversation.to_llm_text}"
  end

  # Public entry point. Currently only generates the attributes; writing them
  # back onto the contact is still pending.
  def generate_and_update_attributes
    generate_attributes
    # to implement the update attributes
  end

  private

  attr_reader :content

  # Runs the instrumented LLM request and turns its reply into an array of
  # attributes. Provider failures are tracked and collapse to [].
  def generate_attributes
    reply = instrument_llm_call(instrumentation_params) { request_attributes }
    parse_response(reply.content)
  rescue RubyLLM::Error => e
    ChatwootExceptionTracker.new(e, account: @conversation.account).capture_exception
    []
  end

  # Issues the chat request, forcing a JSON-object response from the model.
  def request_attributes
    chat
      .with_params(response_format: { type: 'json_object' })
      .with_instructions(system_prompt)
      .ask(content)
  end

  # Payload describing this call for LLM observability spans.
  def instrumentation_params
    prompt_messages = [
      { role: 'system', content: system_prompt },
      { role: 'user', content: content }
    ]

    {
      span_name: 'llm.captain.contact_attributes',
      model: @model,
      temperature: @temperature,
      account_id: @conversation.account_id,
      feature_name: 'contact_attributes',
      messages: prompt_messages,
      metadata: { assistant_id: @assistant.id, contact_id: @contact.id }
    }
  end

  def system_prompt
    Captain::Llm::SystemPromptsService.attributes_generator
  end

  # Extracts the 'attributes' array from the model's JSON reply.
  # Nil or unparseable content yields [].
  def parse_response(content)
    return [] if content.nil?

    parsed = JSON.parse(sanitize_json_response(content))
    parsed.fetch('attributes', [])
  rescue JSON::ParserError => e
    Rails.logger.error "Error in parsing GPT processed response: #{e.message}"
    []
  end
end