fix: captain json parsing (#13708)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Shivam Mishra <scm.mymail@gmail.com>
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
class Captain::Llm::ContactAttributesService < Llm::BaseAiService
|
||||
include Integrations::LlmInstrumentation
|
||||
|
||||
def initialize(assistant, conversation)
|
||||
super()
|
||||
@assistant = assistant
|
||||
@@ -52,7 +53,7 @@ class Captain::Llm::ContactAttributesService < Llm::BaseAiService
|
||||
def parse_response(content)
|
||||
return [] if content.nil?
|
||||
|
||||
JSON.parse(content.strip).fetch('attributes', [])
|
||||
JSON.parse(sanitize_json_response(content)).fetch('attributes', [])
|
||||
rescue JSON::ParserError => e
|
||||
Rails.logger.error "Error in parsing GPT processed response: #{e.message}"
|
||||
[]
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
class Captain::Llm::ContactNotesService < Llm::BaseAiService
|
||||
include Integrations::LlmInstrumentation
|
||||
|
||||
def initialize(assistant, conversation)
|
||||
super()
|
||||
@assistant = assistant
|
||||
@@ -55,7 +56,7 @@ class Captain::Llm::ContactNotesService < Llm::BaseAiService
|
||||
def parse_response(response)
|
||||
return [] if response.nil?
|
||||
|
||||
JSON.parse(response.strip).fetch('notes', [])
|
||||
JSON.parse(sanitize_json_response(response)).fetch('notes', [])
|
||||
rescue JSON::ParserError => e
|
||||
Rails.logger.error "Error in parsing GPT processed response: #{e.message}"
|
||||
[]
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
class Captain::Llm::ConversationFaqService < Llm::BaseAiService
|
||||
include Integrations::LlmInstrumentation
|
||||
|
||||
DISTANCE_THRESHOLD = 0.3
|
||||
|
||||
def initialize(assistant, conversation)
|
||||
@@ -118,7 +119,7 @@ class Captain::Llm::ConversationFaqService < Llm::BaseAiService
|
||||
def parse_response(response)
|
||||
return [] if response.nil?
|
||||
|
||||
JSON.parse(response.strip).fetch('faqs', [])
|
||||
JSON.parse(sanitize_json_response(response)).fetch('faqs', [])
|
||||
rescue JSON::ParserError => e
|
||||
Rails.logger.error "Error in parsing GPT processed response: #{e.message}"
|
||||
[]
|
||||
|
||||
@@ -47,7 +47,7 @@ class Captain::Llm::FaqGeneratorService < Llm::BaseAiService
|
||||
def parse_response(content)
|
||||
return [] if content.nil?
|
||||
|
||||
JSON.parse(content.strip).fetch('faqs', [])
|
||||
JSON.parse(sanitize_json_response(content)).fetch('faqs', [])
|
||||
rescue JSON::ParserError => e
|
||||
Rails.logger.error "Error in parsing GPT processed response: #{e.message}"
|
||||
[]
|
||||
|
||||
@@ -163,7 +163,7 @@ class Captain::Llm::PaginatedFaqGeneratorService < Llm::LegacyBaseOpenAiService
|
||||
content = response.dig('choices', 0, 'message', 'content')
|
||||
return [] if content.nil?
|
||||
|
||||
JSON.parse(content.strip).fetch('faqs', [])
|
||||
JSON.parse(sanitize_json_response(content)).fetch('faqs', [])
|
||||
rescue JSON::ParserError => e
|
||||
Rails.logger.error "Error parsing response: #{e.message}"
|
||||
[]
|
||||
@@ -173,7 +173,7 @@ class Captain::Llm::PaginatedFaqGeneratorService < Llm::LegacyBaseOpenAiService
|
||||
content = response.dig('choices', 0, 'message', 'content')
|
||||
return { 'faqs' => [], 'has_content' => false } if content.nil?
|
||||
|
||||
JSON.parse(content.strip)
|
||||
JSON.parse(sanitize_json_response(content))
|
||||
rescue JSON::ParserError => e
|
||||
Rails.logger.error "Error parsing chunk response: #{e.message}"
|
||||
{ 'faqs' => [], 'has_content' => false }
|
||||
|
||||
@@ -20,6 +20,14 @@ class Llm::BaseAiService
|
||||
|
||||
private
|
||||
|
||||
# Strips markdown code fences (```json ... ``` or ``` ... ```) that some
|
||||
# LLM providers/gateways wrap around JSON responses despite response_format hints.
|
||||
# Removes a markdown code fence (```json ... ``` or ``` ... ```) that some
# LLM providers/gateways wrap around JSON payloads even when a JSON
# response_format is requested. Returns nil unchanged; otherwise returns the
# fence-free, whitespace-trimmed payload.
def sanitize_json_response(response)
  return response if response.nil?

  sanitized = response.strip
  sanitized = sanitized.sub(/\A```(?:\w*)\s*\n?/, '') # leading fence, optional language tag
  sanitized = sanitized.sub(/\n?\s*```\s*\z/, '')     # trailing fence
  sanitized.strip
end
|
||||
|
||||
def setup_model
|
||||
config_value = InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_MODEL')&.value
|
||||
@model = (config_value.presence || DEFAULT_MODEL)
|
||||
|
||||
@@ -24,6 +24,14 @@ class Llm::LegacyBaseOpenAiService
|
||||
|
||||
private
|
||||
|
||||
# Strips markdown code fences (```json ... ``` or ``` ... ```) that some
|
||||
# LLM providers/gateways wrap around JSON responses despite response_format hints.
|
||||
# Strips a surrounding markdown code fence (```json ... ``` / ``` ... ```)
# that some LLM providers/gateways add around JSON output despite
# response_format hints. nil passes through untouched.
def sanitize_json_response(response)
  return response if response.nil?

  response
    .strip
    .sub(/\A```(?:\w*)\s*\n?/, '')
    .sub(/\n?\s*```\s*\z/, '')
    .strip
end
|
||||
|
||||
def uri_base
|
||||
endpoint = InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_ENDPOINT')&.value
|
||||
endpoint.presence || 'https://api.openai.com/'
|
||||
|
||||
35
spec/enterprise/services/llm/base_ai_service_spec.rb
Normal file
35
spec/enterprise/services/llm/base_ai_service_spec.rb
Normal file
@@ -0,0 +1,35 @@
|
||||
require 'rails_helper'

RSpec.describe Llm::BaseAiService do
  subject(:service) { described_class.new }

  before do
    create(:installation_config, name: 'CAPTAIN_OPEN_AI_API_KEY', value: 'test-key')
  end

  describe '#sanitize_json_response' do
    # The method under test is private; funnel all calls through one helper.
    def sanitize(value)
      service.send(:sanitize_json_response, value)
    end

    it 'strips ```json fences' do
      expect(sanitize("```json\n{\"key\": \"value\"}\n```")).to eq('{"key": "value"}')
    end

    it 'strips bare ``` fences' do
      expect(sanitize("```\n{\"key\": \"value\"}\n```")).to eq('{"key": "value"}')
    end

    it 'passes through plain JSON unchanged' do
      expect(sanitize('{"key": "value"}')).to eq('{"key": "value"}')
    end

    it 'returns nil for nil input' do
      expect(sanitize(nil)).to be_nil
    end

    it 'strips surrounding whitespace' do
      expect(sanitize(" \n{\"key\": \"value\"}\n ")).to eq('{"key": "value"}')
    end
  end
end
|
||||
Reference in New Issue
Block a user