diff --git a/enterprise/lib/captain/conversation_completion_service.rb b/enterprise/lib/captain/conversation_completion_service.rb
index 6ac45421c..e9cdc8937 100644
--- a/enterprise/lib/captain/conversation_completion_service.rb
+++ b/enterprise/lib/captain/conversation_completion_service.rb
@@ -56,6 +56,14 @@ class Captain::ConversationCompletionService < Captain::BaseTaskService
     { complete: false, reason: reason }
   end
 
+  # Prefer the system API key over the account's OpenAI hook key.
+  # This is an internal operational evaluation, not a customer-triggered feature,
+  # so it should not consume the customer's OpenAI credits on hosted platforms.
+  # Falls back to the account hook for self-hosted deployments without a system key.
+  def api_key
+    @api_key ||= system_api_key.presence || openai_hook&.settings&.dig('api_key')
+  end
+
   def event_name
     'captain.conversation_completion'
   end
diff --git a/spec/enterprise/lib/captain/conversation_completion_service_spec.rb b/spec/enterprise/lib/captain/conversation_completion_service_spec.rb
index 51d7fa807..58b2b2ce6 100644
--- a/spec/enterprise/lib/captain/conversation_completion_service_spec.rb
+++ b/spec/enterprise/lib/captain/conversation_completion_service_spec.rb
@@ -126,6 +126,33 @@ RSpec.describe Captain::ConversationCompletionService do
     end
   end
 
+  context 'when account has its own OpenAI hook' do
+    before do
+      create(:message, conversation: conversation, message_type: :incoming, content: 'Hello')
+      create(:integrations_hook, :openai, account: account, settings: { 'api_key' => 'customer-own-key' })
+    end
+
+    it 'uses the system API key instead of the account hook key' do
+      expect(Llm::Config).to receive(:with_api_key).with('test-key', api_base: anything).and_yield(mock_context)
+      allow(mock_chat).to receive(:ask).and_return(
+        instance_double(RubyLLM::Message, content: { 'complete' => true, 'reason' => 'Done' }, input_tokens: 10, output_tokens: 5)
+      )
+
+      service.perform
+    end
+
+    it 'falls back to the account hook key when no system key exists' do
+      InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_API_KEY').update!(value: nil)
+
+      expect(Llm::Config).to receive(:with_api_key).with('customer-own-key', api_base: anything).and_yield(mock_context)
+      allow(mock_chat).to receive(:ask).and_return(
+        instance_double(RubyLLM::Message, content: { 'complete' => true, 'reason' => 'Done' }, input_tokens: 10, output_tokens: 5)
+      )
+
+      service.perform
+    end
+  end
+
   context 'when customer quota is exhausted' do
     let(:mock_response) do
       instance_double(