Restructure omni services and add Chatwoot research snapshot

This commit is contained in:
Ruslan Bakiev
2026-02-21 11:11:27 +07:00
parent edea7a0034
commit b73babbbf6
7732 changed files with 978203 additions and 32 deletions

View File

@@ -0,0 +1,49 @@
require 'ruby_llm'
module Llm::Config
  # Fallback model identifier used when no explicit model is configured.
  DEFAULT_MODEL = 'gpt-4.1-mini'.freeze

  class << self
    # Whether initialize! has already applied the global RubyLLM configuration
    # in this process. (||= leaves the flag falsy when it was never set.)
    def initialized?
      @initialized ||= false
    end

    # Idempotently applies the global RubyLLM configuration; safe to call
    # multiple times — only the first call does any work.
    def initialize!
      return if @initialized

      configure_ruby_llm
      @initialized = true
    end

    # Clears the memoized flag so the next initialize! call reconfigures.
    # NOTE(review): does not undo the RubyLLM configuration itself.
    def reset!
      @initialized = false
    end

    # Yields an isolated RubyLLM context configured with the given
    # credentials, leaving the process-wide configuration untouched.
    # api_base may be nil, in which case the context's base is set to nil
    # (presumably falling back to the provider default — verify in RubyLLM).
    def with_api_key(api_key, api_base: nil)
      scoped = RubyLLM.context do |config|
        config.openai_api_key = api_key
        config.openai_api_base = api_base
      end
      yield scoped
    end

    private

    # Applies installation-wide OpenAI credentials (when present) and wires
    # RubyLLM logging into the Rails logger. A single trailing '/' is
    # stripped from the endpoint before use.
    def configure_ruby_llm
      key = system_api_key
      endpoint = openai_endpoint
      RubyLLM.configure do |config|
        config.openai_api_key = key if key.present?
        config.openai_api_base = endpoint.chomp('/') if endpoint.present?
        config.logger = Rails.logger
      end
    end

    # Installation-level OpenAI API key, or nil when unset.
    def system_api_key
      installation_value('CAPTAIN_OPEN_AI_API_KEY')
    end

    # Installation-level OpenAI endpoint override, or nil when unset.
    def openai_endpoint
      installation_value('CAPTAIN_OPEN_AI_ENDPOINT')
    end

    # Looks up a single InstallationConfig row by name and returns its value.
    def installation_value(name)
      InstallationConfig.find_by(name: name)&.value
    end
  end
end

View File

@@ -0,0 +1,41 @@
module Llm::Models
  # Parsed contents of config/llm.yml, loaded once at boot and frozen
  # (shallow freeze — nested hashes remain mutable).
  CONFIG = YAML.load_file(Rails.root.join('config/llm.yml')).freeze

  class << self
    # Raw 'providers' section of the config (nil if absent).
    def providers
      CONFIG['providers']
    end

    # Raw 'models' section of the config (nil if absent).
    def models
      CONFIG['models']
    end

    # Raw 'features' section of the config (nil if absent).
    def features
      CONFIG['features']
    end

    # Names of all configured features.
    def feature_keys
      CONFIG['features'].keys
    end

    # Default model name for the given feature, or nil when unknown.
    def default_model_for(feature)
      CONFIG.dig('features', feature.to_s, 'default')
    end

    # Model names allowed for the given feature; [] when unknown.
    def models_for(feature)
      CONFIG.dig('features', feature.to_s, 'models') || []
    end

    # True when model_name is one of the feature's configured models.
    def valid_model_for?(feature, model_name)
      models_for(feature).include?(model_name.to_s)
    end

    # Builds the API-facing payload for one feature: its model entries and
    # default model. Returns nil for an unknown feature key.
    def feature_config(feature_key)
      feature = features[feature_key.to_s]
      return nil unless feature

      {
        models: feature['models'].map { |model_name| model_entry(model_name) },
        default: feature['default']
      }
    end

    private

    # Expands one model name into its display attributes from the config.
    def model_entry(model_name)
      model = models[model_name]
      {
        id: model_name,
        display_name: model['display_name'],
        provider: model['provider'],
        coming_soon: model['coming_soon'],
        credit_multiplier: model['credit_multiplier']
      }
    end
  end
end