Restructure omni services and add Chatwoot research snapshot

This commit is contained in:
Ruslan Bakiev
2026-02-21 11:11:27 +07:00
parent edea7a0034
commit b73babbbf6
7732 changed files with 978203 additions and 32 deletions

View File

@@ -0,0 +1,31 @@
# frozen_string_literal: true
# Base service for LLM operations using RubyLLM.
# New features should inherit from this class.
class Llm::BaseAiService
  DEFAULT_MODEL = Llm::Config::DEFAULT_MODEL
  DEFAULT_TEMPERATURE = 1.0

  attr_reader :model, :temperature

  # Loads the RubyLLM configuration, then resolves the model and
  # temperature this service instance will use.
  def initialize
    Llm::Config.initialize!
    setup_model
    setup_temperature
  end

  # Builds a RubyLLM chat handle. Callers may override the model or
  # temperature per call; both default to the values resolved in #initialize.
  def chat(model: @model, temperature: @temperature)
    RubyLLM.chat(model: model).with_temperature(temperature)
  end

  private

  # Prefers the installation-level model override when present,
  # otherwise falls back to DEFAULT_MODEL.
  def setup_model
    configured = InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_MODEL')&.value
    @model = configured.presence || DEFAULT_MODEL
  end

  def setup_temperature
    @temperature = DEFAULT_TEMPERATURE
  end
end

View File

@@ -0,0 +1,36 @@
# frozen_string_literal: true
# DEPRECATED: This class uses the legacy OpenAI Ruby gem directly.
# Only used for PDF/file operations that require OpenAI's files API:
# - Captain::Llm::PdfProcessingService (files.upload for assistants)
# - Captain::Llm::PaginatedFaqGeneratorService (uses file_id from uploaded files)
#
# For all other LLM operations, use Llm::BaseAiService with RubyLLM instead.
class Llm::LegacyBaseOpenAiService
  DEFAULT_MODEL = 'gpt-4.1-mini'

  attr_reader :client, :model

  # Builds the OpenAI client from installation config and resolves the model.
  # Any failure (including a missing API key) is re-raised with a uniform
  # message; the original exception remains available via `cause`.
  def initialize
    @client = build_client
    setup_model
  rescue StandardError => e
    raise "Failed to initialize OpenAI client: #{e.message}"
  end

  private

  # Instantiates the legacy OpenAI client. The API key is mandatory
  # (find_by! raises when absent); error logging only in development.
  def build_client
    OpenAI::Client.new(
      access_token: InstallationConfig.find_by!(name: 'CAPTAIN_OPEN_AI_API_KEY').value,
      uri_base: uri_base,
      log_errors: Rails.env.development?
    )
  end

  # Resolves the API endpoint, defaulting to OpenAI's public URL.
  def uri_base
    configured = InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_ENDPOINT')&.value
    configured.presence || 'https://api.openai.com/'
  end

  # Prefers the installation-level model override when present,
  # otherwise falls back to DEFAULT_MODEL.
  def setup_model
    configured = InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_MODEL')&.value
    @model = configured.presence || DEFAULT_MODEL
  end
end