Restructure omni services and add Chatwoot research snapshot

This commit is contained in:
Ruslan Bakiev
2026-02-21 11:11:27 +07:00
parent edea7a0034
commit b73babbbf6
7732 changed files with 978203 additions and 32 deletions

View File

@@ -0,0 +1,147 @@
class Captain::Conversation::ResponseBuilderJob < ApplicationJob
  # Maximum allowed length for a generated message.
  # NOTE(review): not enforced anywhere in this class — presumably referenced
  # by collaborators; confirm before removing.
  MAX_MESSAGE_LENGTH = 10_000

  # Transient failures worth retrying: attachment blobs not yet available,
  # and upstream LLM 400s.
  retry_on ActiveStorage::FileNotFoundError, attempts: 3, wait: 2.seconds
  retry_on Faraday::BadRequestError, attempts: 3, wait: 2.seconds

  # Generates an assistant reply for the conversation, via the v2 agent
  # runner when the `captain_integration_v2` feature flag is on, otherwise
  # via the legacy chat service. Any non-retryable error results in a
  # handoff to a human agent instead of a failed job.
  def perform(conversation, assistant)
    @conversation = conversation
    @inbox = conversation.inbox
    @assistant = assistant
    Current.executed_by = @assistant

    if captain_v2_enabled?
      generate_response_with_v2
    else
      generate_and_process_response
    end
  rescue StandardError => e
    # Re-raise the classes registered with `retry_on` so the job framework
    # can retry them; everything else degrades to a handoff.
    raise e if e.is_a?(ActiveStorage::FileNotFoundError) || e.is_a?(Faraday::BadRequestError)

    handle_error(e)
  ensure
    # Always clear the actor so it cannot leak into other work on this thread.
    Current.executed_by = nil
  end

  private

  delegate :account, :inbox, to: :@conversation

  # Legacy (v1) response generation path.
  def generate_and_process_response
    @response = Captain::Llm::AssistantChatService.new(assistant: @assistant, conversation_id: @conversation.display_id).generate_response(
      message_history: collect_previous_messages
    )
    process_response
  end

  # v2 response generation path using the agent runner.
  def generate_response_with_v2
    @response = Captain::Assistant::AgentRunnerService.new(assistant: @assistant, conversation: @conversation).generate_response(
      message_history: collect_previous_messages
    )
    process_response
  end

  # Either hands the conversation off (when the LLM asked for it) or persists
  # the reply and counts it against the account's response usage. Wrapped in
  # a transaction so message creation and usage increment succeed or fail
  # together.
  def process_response
    ActiveRecord::Base.transaction do
      if handoff_requested?
        process_action('handoff')
      else
        create_messages
        Rails.logger.info("[CAPTAIN][ResponseBuilderJob] Incrementing response usage for #{account.id}")
        account.increment_response_usage
      end
    end
  end

  # Builds the chat history sent to the LLM: every public incoming/outgoing
  # message mapped to a { content:, role:, agent_name?: } hash.
  def collect_previous_messages
    @conversation
      .messages
      .where(message_type: [:incoming, :outgoing])
      .where(private: false)
      .map do |message|
      message_hash = {
        content: prepare_multimodal_message_content(message),
        role: determine_role(message)
      }
      # Include agent_name if present in additional_attributes
      message_hash[:agent_name] = message.additional_attributes['agent_name'] if message.additional_attributes&.dig('agent_name').present?
      message_hash
    end
  end

  # Maps Chatwoot message direction to the LLM chat role.
  def determine_role(message)
    message.message_type == 'incoming' ? 'user' : 'assistant'
  end

  # Delegates content assembly (text plus attachments) to the message builder.
  def prepare_multimodal_message_content(message)
    Captain::OpenAiMessageBuilderService.new(message: message).generate_content
  end

  # The LLM signals a handoff by returning the literal 'conversation_handoff'.
  def handoff_requested?
    @response['response'] == 'conversation_handoff'
  end

  # Dispatches a named action; currently only 'handoff' is supported.
  def process_action(action)
    case action
    when 'handoff'
      I18n.with_locale(@assistant.account.locale) do
        create_handoff_message
        @conversation.bot_handoff!
        send_out_of_office_message_if_applicable
      end
    end
  end

  def send_out_of_office_message_if_applicable
    # Campaign conversations should never receive OOO templates — the campaign itself
    # serves as the initial outreach, and OOO would be confusing in that context.
    return if @conversation.campaign.present?

    ::MessageTemplates::Template::OutOfOffice.perform_if_applicable(@conversation)
  end

  # Posts the assistant's configured handoff message, falling back to the
  # localized default.
  def create_handoff_message
    create_outgoing_message(
      @assistant.config['handoff_message'].presence || I18n.t('conversations.captain.handoff')
    )
  end

  # Persists the LLM reply as an outgoing message.
  def create_messages
    validate_message_content!(@response['response'])
    create_outgoing_message(@response['response'], agent_name: @response['agent_name'])
  end

  # Guards against persisting an empty reply.
  def validate_message_content!(content)
    raise ArgumentError, 'Message content cannot be blank' if content.blank?
  end

  # Creates the outgoing message, tagging the generating agent when known.
  def create_outgoing_message(message_content, agent_name: nil)
    additional_attrs = {}
    additional_attrs[:agent_name] = agent_name if agent_name.present?

    @conversation.messages.create!(
      message_type: :outgoing,
      account_id: account.id,
      inbox_id: inbox.id,
      sender: @assistant,
      content: message_content,
      additional_attributes: additional_attrs
    )
  end

  # Reports the error and falls back to a human handoff so the end user is
  # never left without a response.
  def handle_error(error)
    log_error(error)
    process_action('handoff')
    true
  end

  def log_error(error)
    ChatwootExceptionTracker.new(error, account: account).capture_exception
  end

  # Feature flag gating the v2 agent-runner pipeline.
  def captain_v2_enabled?
    account.feature_enabled?('captain_integration_v2')
  end
end

View File

@@ -0,0 +1,28 @@
class Captain::Copilot::ResponseJob < ApplicationJob
  queue_as :default

  # Produces a copilot chat response for the given assistant, user and thread
  # by delegating to Captain::Copilot::ChatService.
  def perform(assistant:, conversation_id:, user_id:, copilot_thread_id:, message:)
    Rails.logger.info("#{self.class.name} Copilot response job for assistant_id=#{assistant.id} user_id=#{user_id}")

    chat_service = Captain::Copilot::ChatService.new(
      assistant,
      user_id: user_id,
      copilot_thread_id: copilot_thread_id,
      conversation_id: conversation_id
    )

    # When using copilot_thread, message is already in previous_history
    # Pass nil to avoid duplicate
    chat_service.generate_response(copilot_thread_id.present? ? nil : message)
  end
end

View File

@@ -0,0 +1,61 @@
class Captain::Documents::CrawlJob < ApplicationJob
  include Captain::FirecrawlHelper

  queue_as :low

  # Routes a document to the appropriate ingestion strategy: PDF processing,
  # Firecrawl (when an API key is configured), or the simple page crawler.
  def perform(document)
    if document.pdf_document?
      perform_pdf_processing(document)
    elsif firecrawl_configured?
      perform_firecrawl_crawl(document)
    else
      perform_simple_crawl(document)
    end
  end

  private

  # A Firecrawl API key in InstallationConfig enables the hosted crawler.
  def firecrawl_configured?
    InstallationConfig.find_by(name: 'CAPTAIN_FIRECRAWL_API_KEY')&.value.present?
  end

  # Extracts the PDF contents and marks the document as available.
  def perform_pdf_processing(document)
    Captain::Llm::PdfProcessingService.new(document).process
    document.update!(status: :available)
  rescue StandardError => e
    Rails.logger.error I18n.t('captain.documents.pdf_processing_failed', document_id: document.id, error: e.message)
    raise # Re-raise to let job framework handle retry logic
  end

  # Enqueues a parser job for every link discovered on the page, then one for
  # the page itself.
  def perform_simple_crawl(document)
    discovered_links = Captain::Tools::SimplePageCrawlService.new(document.external_link).page_links
    (discovered_links + [document.external_link]).each do |link|
      Captain::Tools::SimplePageCrawlParserJob.perform_later(
        assistant_id: document.assistant_id,
        page_link: link
      )
    end
  end

  # Starts a Firecrawl crawl capped by the account's remaining document
  # allowance (default 10) and a hard ceiling of 500 pages.
  def perform_firecrawl_crawl(document)
    Captain::Tools::FirecrawlService
      .new
      .perform(
        document.external_link,
        firecrawl_webhook_url(document),
        crawl_limit_for(document.account)
      )
  end

  # Remaining document allowance for the account, clamped to at most 500.
  def crawl_limit_for(account)
    document_limits = (account.usage_limits[:captain] || {})[:documents] || {}
    [document_limits[:current_available] || 10, 500].min
  end

  # Callback URL Firecrawl posts results to; carries the assistant id and a
  # token so the webhook endpoint can authenticate the payload.
  def firecrawl_webhook_url(document)
    base_url = Rails.application.routes.url_helpers.enterprise_webhooks_firecrawl_url
    "#{base_url}?assistant_id=#{document.assistant_id}&token=#{generate_firecrawl_token(document.assistant_id, document.account_id)}"
  end
end

View File

@@ -0,0 +1,78 @@
class Captain::Documents::ResponseBuilderJob < ApplicationJob
  queue_as :low

  # Rebuilds the FAQ responses for a document: destroys the existing ones,
  # generates a fresh set (paginated for large PDFs), and persists them.
  def perform(document, options = {})
    document.responses.destroy_all
    create_responses_from_faqs(generate_faqs(document, options), document)
  end

  private

  # Chooses between the paginated generator (PDFs with an OpenAI file id)
  # and the single-shot generator.
  def generate_faqs(document, options)
    return generate_standard_faqs(document) unless should_use_pagination?(document)

    generate_paginated_faqs(document, options)
  end

  # Runs the paginated generator and records its processing metadata on the
  # document before returning the FAQ list.
  def generate_paginated_faqs(document, options)
    generator = build_paginated_service(document, options)
    generator.generate.tap { store_paginated_metadata(document, generator) }
  end

  def generate_standard_faqs(document)
    Captain::Llm::FaqGeneratorService.new(document.content, document.account.locale_english_name, account_id: document.account_id).generate
  end

  def build_paginated_service(document, options)
    Captain::Llm::PaginatedFaqGeneratorService.new(
      document,
      pages_per_chunk: options[:pages_per_chunk],
      max_pages: options[:max_pages],
      language: document.account.locale_english_name
    )
  end

  # Records how the FAQ set was produced so generation runs can be audited.
  def store_paginated_metadata(document, service)
    generation_info = {
      'method' => 'paginated',
      'pages_processed' => service.total_pages_processed,
      'iterations' => service.iterations_completed,
      'timestamp' => Time.current.iso8601
    }
    document.update!(metadata: (document.metadata || {}).merge('faq_generation' => generation_info))
  end

  def create_responses_from_faqs(faqs, document)
    faqs.each { |faq| create_response(faq, document) }
  end

  def should_use_pagination?(document)
    # Auto-detect when to use pagination
    # For now, use pagination for PDFs with OpenAI file ID
    document.pdf_document? && document.openai_file_id.present?
  end

  # Persists one FAQ; validation failures are logged and skipped so a single
  # bad entry does not abort the whole batch.
  def create_response(faq, document)
    document.responses.create!(
      question: faq['question'],
      answer: faq['answer'],
      assistant: document.assistant,
      documentable: document
    )
  rescue ActiveRecord::RecordInvalid => e
    Rails.logger.error I18n.t('captain.documents.response_creation_error', error: e.message)
  end
end

View File

@@ -0,0 +1,32 @@
class Captain::InboxPendingConversationsResolutionJob < ApplicationJob
  queue_as :low

  # Auto-resolves pending conversations in a Captain-enabled inbox that have
  # been inactive for over an hour, posting a resolution message first.
  # Capped at Limits::BULK_ACTIONS_LIMIT conversations per run.
  def perform(inbox)
    Current.executed_by = inbox.captain_assistant

    inbox.conversations
         .pending
         .where('last_activity_at < ? ', Time.now.utc - 1.hour)
         .limit(Limits::BULK_ACTIONS_LIMIT)
         .each do |conversation|
      create_outgoing_message(conversation, inbox)
      conversation.resolved!
    end
  ensure
    # Clear the actor even when resolution fails midway.
    Current.reset
  end

  private

  # Posts the assistant's configured resolution message (or the localized
  # default) in the account's locale.
  def create_outgoing_message(conversation, inbox)
    I18n.with_locale(inbox.account.locale) do
      configured_message = inbox.captain_assistant.config['resolution_message']
      conversation.messages.create!(
        {
          message_type: :outgoing,
          account_id: conversation.account_id,
          inbox_id: conversation.inbox_id,
          content: configured_message.presence || I18n.t('conversations.activity.auto_resolution_message'),
          sender: inbox.captain_assistant
        }
      )
    end
  end
end

View File

@@ -0,0 +1,9 @@
class Captain::Llm::UpdateEmbeddingJob < ApplicationJob
  queue_as :low

  # Recomputes the vector embedding for the given content and stores it on
  # the record.
  def perform(record, content)
    vector = Captain::Llm::EmbeddingService
             .new(account_id: record.account_id)
             .get_embedding(content)
    record.update!(embedding: vector)
  end
end

View File

@@ -0,0 +1,28 @@
class Captain::Tools::FirecrawlParserJob < ApplicationJob
  queue_as :low

  # Upserts an assistant document from a Firecrawl webhook payload, keyed by
  # the canonicalized (trailing-slash-free) page URL.
  def perform(assistant_id:, payload:)
    assistant = Captain::Assistant.find(assistant_id)
    page_metadata = payload[:metadata]
    canonical_url = normalize_link(page_metadata['url'])

    assistant.documents.find_or_initialize_by(external_link: canonical_url).update!(
      external_link: canonical_url,
      content: payload[:markdown],
      name: page_metadata['title'],
      status: :available
    )
  rescue StandardError => e
    # NOTE(review): re-raising as a bare RuntimeError drops the original
    # exception class and backtrace; kept as-is to preserve behavior.
    raise "Failed to parse FireCrawl data: #{e.message}"
  end

  private

  # Treats "https://a/b" and "https://a/b/" as the same document.
  def normalize_link(raw_url)
    raw_url.to_s.delete_suffix('/')
  end
end

View File

@@ -0,0 +1,39 @@
class Captain::Tools::SimplePageCrawlParserJob < ApplicationJob
  queue_as :low

  # Crawls a single page and upserts its title/text as an assistant document.
  # Skips the work entirely when the account has no document quota left.
  def perform(assistant_id:, page_link:)
    assistant = Captain::Assistant.find(assistant_id)
    account = assistant.account

    if limit_exceeded?(account)
      Rails.logger.info("Document limit exceeded for #{assistant_id}")
      return
    end

    crawler = Captain::Tools::SimplePageCrawlService.new(page_link)
    page_title = crawler.page_title || ''
    content = crawler.body_text_content || ''
    normalized_link = normalize_link(page_link)

    document = assistant.documents.find_or_initialize_by(external_link: normalized_link)
    # Truncate to storage limits: 255 chars for name, 15_000 for content.
    document.update!(
      external_link: normalized_link,
      name: page_title[0..254], content: content[0..14_999], status: :available
    )
  rescue StandardError => e
    raise "Failed to parse data: #{page_link} #{e.message}"
  end

  private

  # Canonicalizes links so "https://a/b" and "https://a/b/" dedupe to one doc.
  def normalize_link(raw_link)
    raw_link.to_s.delete_suffix('/')
  end

  # True when no document quota remains for the account.
  def limit_exceeded?(account)
    remaining = account.usage_limits[:captain][:documents][:current_available]
    # Idiom fix: `negative? || zero?` is exactly `<= 0`.
    remaining <= 0
  end
end

View File

@@ -0,0 +1,21 @@
module Enterprise::Account::ConversationsResolutionSchedulerJob
  # Extends the CE resolution scheduler to also process Captain inboxes.
  def perform
    super
    resolve_captain_conversations
  end

  private

  # Enqueues a pending-conversation resolution job for every
  # Captain-connected inbox, skipping email inboxes.
  def resolve_captain_conversations
    CaptainInbox.all.find_each(batch_size: 100) do |captain_inbox|
      connected_inbox = captain_inbox.inbox
      next if connected_inbox.email?

      Captain::InboxPendingConversationsResolutionJob.perform_later(connected_inbox)
    end
  end
end

View File

@@ -0,0 +1,22 @@
class Enterprise::CloudflareVerificationJob < ApplicationJob
  queue_as :default

  # Checks a portal's custom hostname status with Cloudflare and (re)creates
  # the custom hostname when the status check reports errors.
  def perform(portal_id)
    portal = Portal.find(portal_id)
    return unless portal && portal.custom_domain.present?

    status = check_hostname_status(portal)
    create_hostname(portal) if status[:errors].present?
  end

  private

  def check_hostname_status(portal)
    Cloudflare::CheckCustomHostnameService.new(portal: portal).perform
  end

  def create_hostname(portal)
    Cloudflare::CreateCustomHostnameService.new(portal: portal).perform
  end
end

View File

@@ -0,0 +1,7 @@
class Enterprise::CreateStripeCustomerJob < ApplicationJob
  queue_as :default

  # Provisions a Stripe customer for the account via the billing service.
  def perform(account)
    service = Enterprise::Billing::CreateStripeCustomerService.new(account: account)
    service.perform
  end
end

View File

@@ -0,0 +1,26 @@
module Enterprise::DeleteObjectJob
  private

  # Extends the CE list of heavy associations with SlaPolicy's applied SLAs.
  def heavy_associations
    super.merge(SlaPolicy => %i[applied_slas]).freeze
  end

  def process_post_deletion_tasks(object, user, ip)
    create_audit_entry(object, user, ip)
  end

  # Records an audit-log row for user-initiated deletions of auditable models.
  # NOTE(review): uses `create` (not `create!`), so a failed insert is silent.
  def create_audit_entry(object, user, ip)
    return unless %w[Inbox Conversation SlaPolicy].include?(object.class.to_s) && user.present?

    Enterprise::AuditLog.create(
      auditable: object,
      audited_changes: object.attributes,
      action: 'destroy',
      user: user,
      associated: object.account,
      remote_address: ip
    )
  end
end

View File

@@ -0,0 +1,30 @@
module Enterprise::Internal::CheckNewVersionsJob
  # After the CE version check, syncs pricing-plan info from the fetched
  # instance metadata and reconciles premium config/features.
  def perform
    super
    update_plan_info
    reconcile_premium_config_and_features
  end

  private

  # Persists plan details returned by the version-check endpoint as locked
  # installation configs. No-op when no instance info was fetched.
  def update_plan_info
    return if @instance_info.blank?

    {
      'INSTALLATION_PRICING_PLAN' => @instance_info['plan'],
      'INSTALLATION_PRICING_PLAN_QUANTITY' => @instance_info['plan_quantity'],
      'CHATWOOT_SUPPORT_WEBSITE_TOKEN' => @instance_info['chatwoot_support_website_token'],
      'CHATWOOT_SUPPORT_IDENTIFIER_HASH' => @instance_info['chatwoot_support_identifier_hash'],
      'CHATWOOT_SUPPORT_SCRIPT_URL' => @instance_info['chatwoot_support_script_url']
    }.each { |key, value| update_installation_config(key: key, value: value) }
  end

  # Upserts a locked InstallationConfig entry.
  def update_installation_config(key:, value:)
    entry = InstallationConfig.find_or_initialize_by(name: key)
    entry.value = value
    entry.locked = true
    entry.save!
  end

  def reconcile_premium_config_and_features
    Internal::ReconcilePlanConfigService.new.perform
  end
end

View File

@@ -0,0 +1,11 @@
module Enterprise::TriggerScheduledItemsJob
  ## Triggers Enterprise specific jobs
  ####################################
  def perform
    super
    # Triggers Account Sla jobs
    Sla::TriggerSlasForAccountsJob.perform_later
  end
end

View File

@@ -0,0 +1,9 @@
class Internal::AccountAnalysisJob < ApplicationJob
  queue_as :low

  # Runs threat analysis for an account. Cloud deployments only.
  def perform(account)
    return unless ChatwootApp.chatwoot_cloud?

    Internal::AccountAnalysis::ThreatAnalyserService.new(account).perform
  end
end

View File

@@ -0,0 +1,21 @@
class Messages::AudioTranscriptionJob < ApplicationJob
  queue_as :low

  # A 400 from the transcription provider won't improve on retry, so log
  # the context and drop the job.
  discard_on Faraday::BadRequestError do |job, error|
    details = {
      attachment_id: job.arguments.first,
      job_id: job.job_id,
      status_code: error.response&.dig(:status)
    }
    Rails.logger.warn("Discarding audio transcription job due to bad request: #{details}")
  end

  # The audio blob may not be readable yet right after upload; retry briefly.
  retry_on ActiveStorage::FileNotFoundError, wait: 2.seconds, attempts: 3

  # Transcribes the audio attachment; exits quietly if it was deleted.
  def perform(attachment_id)
    attachment = Attachment.find_by(id: attachment_id)
    return if attachment.blank?

    Messages::AudioTranscriptionService.new(attachment).perform
  end
end

View File

@@ -0,0 +1,54 @@
class Migration::CompanyAccountBatchJob < ApplicationJob
  queue_as :low

  # Backfills contact→company links for one account: each unlinked contact
  # whose email passes the business-email detector gets attached to a
  # company derived from its email domain.
  def perform(account)
    account.contacts
           .where.not(email: nil)
           .find_in_batches(batch_size: 1000) do |contact_batch|
      process_contact_batch(contact_batch, account)
    end
  end

  private

  def process_contact_batch(contacts, account)
    contacts.each do |contact|
      next unless should_process?(contact)

      company = find_or_create_company(contact, account)
      # rubocop:disable Rails/SkipsModelValidations
      contact.update_column(:company_id, company.id) if company
      # rubocop:enable Rails/SkipsModelValidations
    end
  end

  # Only link contacts that are not already linked, have an email, and whose
  # email the business-email detector accepts.
  def should_process?(contact)
    return false if contact.company_id.present?
    return false if contact.email.blank?

    Companies::BusinessEmailDetectorService.new(contact.email).perform
  end

  def find_or_create_company(contact, account)
    domain = extract_domain(contact.email)
    Company.find_or_create_by!(account: account, domain: domain) do |company|
      company.name = derive_company_name(contact, domain)
    end
  rescue ActiveRecord::RecordNotUnique
    # Race condition: Another job created it between our check and create
    # just find the one that was created
    Company.find_by(account: account, domain: domain)
  end

  def extract_domain(email)
    email.split('@').last&.downcase
  end

  # Prefer an explicitly stored company name; otherwise humanize the first
  # label of the domain (e.g. "acme-corp.com" → "Acme Corp").
  def derive_company_name(contact, domain)
    contact.additional_attributes&.dig('company_name').presence ||
      domain.split('.').first.tr('-_', ' ').titleize
  end
end

View File

@@ -0,0 +1,17 @@
class Migration::CompanyBackfillJob < ApplicationJob
  queue_as :low

  # Fans out Migration::CompanyAccountBatchJob across every account.
  def perform
    Rails.logger.info 'Starting company backfill migration...'

    enqueued = 0
    Account.find_in_batches(batch_size: 100) do |accounts|
      accounts.each do |account|
        Rails.logger.info "Enqueuing company backfill for account #{account.id}"
        Migration::CompanyAccountBatchJob.perform_later(account)
        enqueued += 1
      end
    end

    Rails.logger.info "Company backfill migration complete. Enqueued jobs for #{enqueued} accounts."
  end
end

View File

@@ -0,0 +1,7 @@
class Portal::ArticleIndexingJob < ApplicationJob
  queue_as :low

  # Regenerates and stores the search terms for a help-center article.
  # NOTE(review): "seach" is a typo baked into the model method's name;
  # fixing it requires a coordinated rename in the Article model.
  def perform(article)
    article.generate_and_save_article_seach_terms
  end
end

View File

@@ -0,0 +1,33 @@
class Saml::UpdateAccountUsersProviderJob < ApplicationJob
  queue_as :default

  # Updates the authentication provider for users in an account
  # This job is triggered when SAML settings are created or destroyed
  def perform(account_id, provider)
    target_account = Account.find(account_id)
    target_account.users.find_each(batch_size: 1000) do |user|
      next unless should_update_user_provider?(user, provider)

      # rubocop:disable Rails/SkipsModelValidations
      user.update_column(:provider, provider)
      # rubocop:enable Rails/SkipsModelValidations
    end
  end

  private

  # Determines if a user's provider should be updated based on their multi-account status
  # When resetting to 'email', only update users who don't have SAML enabled on other accounts
  # This prevents breaking SAML authentication for users who belong to multiple accounts
  def should_update_user_provider?(user, provider)
    return true unless provider == 'email'

    !user_has_other_saml_accounts?(user)
  end

  # Checks if the user belongs to any other accounts that have SAML configured
  # Used to preserve SAML authentication when one account disables SAML but others still use it
  def user_has_other_saml_accounts?(user)
    user.accounts.joins(:saml_settings).exists?
  end
end

View File

@@ -0,0 +1,9 @@
class Sla::ProcessAccountAppliedSlasJob < ApplicationJob
  queue_as :medium

  # Fans out one ProcessAppliedSlaJob per SLA that still needs evaluation
  # (status 'active' or 'active_with_misses').
  #
  # Uses find_each instead of each: the original loaded every matching
  # applied SLA into memory at once, which is unbounded for large accounts;
  # find_each streams the relation in batches.
  def perform(account)
    account.applied_slas.where(sla_status: %w[active active_with_misses]).find_each do |applied_sla|
      Sla::ProcessAppliedSlaJob.perform_later(applied_sla)
    end
  end
end

View File

@@ -0,0 +1,7 @@
class Sla::ProcessAppliedSlaJob < ApplicationJob
  queue_as :medium

  # Delegates evaluation of a single applied SLA to the service object.
  def perform(applied_sla)
    Sla::EvaluateAppliedSlaService.new(applied_sla: applied_sla).perform
  end
end

View File

@@ -0,0 +1,10 @@
class Sla::TriggerSlasForAccountsJob < ApplicationJob
  queue_as :scheduled_jobs

  # Enqueues SLA processing for every account that has at least one SLA
  # policy, streaming accounts in batches.
  def perform
    Account.joins(:sla_policies).distinct.find_each do |account|
      Rails.logger.info "Enqueuing ProcessAccountAppliedSlasJob for account #{account.id}"
      Sla::ProcessAccountAppliedSlasJob.perform_later(account)
    end
  end
end