mirror of
https://gh.wpcy.net/https://github.com/discourse/discourse.git
synced 2026-05-07 07:04:12 +08:00
72 lines
1.8 KiB
Ruby
# frozen_string_literal: true
|
|
|
|
class LlmModelSerializer < ApplicationSerializer
  # Serializes an LlmModel record for the admin UI under the "ai_llm" root.
  #
  # TODO: we probably should rename the table LlmModel to AiLlm so it is
  # consistent with AiAgent and AiTool — "LLM model" is confusing given that
  # "large language model model" reads awkwardly.
  root "ai_llm"

  attributes :id,
             :display_name,
             :name,
             :provider,
             :max_prompt_tokens,
             :max_output_tokens,
             :tokenizer,
             :api_key,
             :ai_secret_id,
             :url,
             :provider_params,
             :vision_enabled,
             :input_cost,
             :output_cost,
             :cached_input_cost,
             :cache_write_cost,
             :used_by,
             :seeded,
             :allowed_attachment_types

  has_one :user, serializer: BasicUserSerializer, embed: :object
  has_many :llm_quotas, serializer: LlmQuotaSerializer, embed: :objects
  has_one :llm_credit_allocation,
          serializer: LlmCreditAllocationSerializer,
          embed: :object,
          if: :include_credit_allocation?
  has_many :llm_feature_credit_costs,
           serializer: LlmFeatureCreditCostSerializer,
           embed: :objects,
           if: :include_credit_allocation?

  # Usage entry for this model. Prefers a precomputed usage map supplied via
  # the serializer scope; otherwise falls back to a fresh global lookup.
  def used_by
    usage_map =
      (scope && scope[:llm_usage]) || DiscourseAi::Configuration::LlmEnumerator.global_usage

    usage_map[object.id]
  end

  # Seeded (platform-provisioned) models mask their real credentials.
  def api_key
    return object.api_key unless object.seeded?
    "********"
  end

  # Seeded models also hide their endpoint URL.
  def url
    return object.url unless object.seeded?
    "********"
  end

  # Seeded models report a fixed provider label instead of the real one.
  def provider
    return object.provider unless object.seeded?
    "CDCK"
  end

  # Credit-allocation associations are only embedded when the credit system
  # is enabled for this model.
  def include_credit_allocation?
    object.credit_system_enabled?
  end

  def seeded
    object.seeded?
  end
end
|