discourse/plugins/discourse-ai/app/serializers/llm_model_serializer.rb
Sam e3fae646d4
DEV: AI persona to agent migration (#38319)
Co-authored-by: Keegan George <kgeorge13@gmail.com>
2026-03-10 15:59:45 +11:00

72 lines
1.8 KiB
Ruby

# frozen_string_literal: true
class LlmModelSerializer < ApplicationSerializer
  # Serializes an LlmModel record for the admin LLM settings UI.
  # Seeded (CDCK-provided) models have their credentials and provider
  # masked before being sent to the client.
  #
  # TODO: consider renaming the LlmModel table to AiLlm, for consistency
  # with AiAgent and AiTool ("LLM model" expands to the redundant
  # "large language model model").
  root "ai_llm"

  attributes :id,
             :display_name,
             :name,
             :provider,
             :max_prompt_tokens,
             :max_output_tokens,
             :tokenizer,
             :api_key,
             :ai_secret_id,
             :url,
             :provider_params,
             :vision_enabled,
             :input_cost,
             :output_cost,
             :cached_input_cost,
             :cache_write_cost,
             :used_by,
             :seeded,
             :allowed_attachment_types

  has_one :user, serializer: BasicUserSerializer, embed: :object
  has_many :llm_quotas, serializer: LlmQuotaSerializer, embed: :objects
  has_one :llm_credit_allocation,
          serializer: LlmCreditAllocationSerializer,
          embed: :object,
          if: :include_credit_allocation?
  has_many :llm_feature_credit_costs,
           serializer: LlmFeatureCreditCostSerializer,
           embed: :objects,
           if: :include_credit_allocation?

  # Which features are using this model. A precomputed usage map may be
  # injected through the serializer scope (avoids recomputation when
  # serializing many models); otherwise we fall back to a global lookup.
  def used_by
    usage_map =
      (scope && scope[:llm_usage]) || DiscourseAi::Configuration::LlmEnumerator.global_usage
    usage_map[object.id]
  end

  # Never reveal the real key for seeded models.
  def api_key
    return "********" if object.seeded?
    object.api_key
  end

  # Seeded models hide their endpoint URL as well.
  def url
    return "********" if object.seeded?
    object.url
  end

  # Seeded models report the vendor name rather than the real provider.
  def provider
    return "CDCK" if object.seeded?
    object.provider
  end

  # Credit-allocation associations are only serialized when the credit
  # system is enabled for this model.
  def include_credit_allocation?
    object.credit_system_enabled?
  end

  def seeded
    object.seeded?
  end
end