deepseek constant
OpenAICompatibleProviderConfig const deepseek
DeepSeek configuration using the OpenAI-compatible interface.
Implementation
static const OpenAICompatibleProviderConfig deepseek =
    OpenAICompatibleProviderConfig(
  providerId: 'deepseek-openai',
  displayName: 'DeepSeek (OpenAI-compatible)',
  description: 'DeepSeek AI models using OpenAI-compatible interface',
  defaultBaseUrl: ProviderDefaults.deepseekBaseUrl,
  defaultModel: ProviderDefaults.deepseekDefaultModel,
  supportedCapabilities: {
    LLMCapability.chat,
    LLMCapability.streaming,
    LLMCapability.toolCalling,
    LLMCapability.reasoning,
  },
  // For unknown DeepSeek models, assume basic capabilities
  defaultCapabilities: {
    LLMCapability.chat,
    LLMCapability.streaming,
    LLMCapability.toolCalling,
  },
  allowDynamicCapabilities: true,
  supportsReasoningEffort: false,
  supportsStructuredOutput: true,
  modelConfigs: {
    'deepseek-chat': ModelCapabilityConfig(
      supportsReasoning: false,
      supportsVision: false,
      supportsToolCalling: true,
      maxContextLength: 32768,
    ),
    'deepseek-reasoner': ModelCapabilityConfig(
      supportsReasoning: true,
      supportsVision: false,
      supportsToolCalling: true,
      maxContextLength: 32768,
      disableTemperature: true,
      disableTopP: true,
    ),
  },
);
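The per-model entries in modelConfigs override the conservative defaultCapabilities fallback used for model IDs that are not listed. The sketch below shows one way a caller might resolve the capability set for a given model name. It is illustrative only: it assumes the constructor parameters above are exposed as public fields of the same names (modelConfigs, defaultCapabilities, and the ModelCapabilityConfig flags), and the capabilitiesFor helper is hypothetical, not part of the package.

// A minimal sketch, not the package's own resolution logic. Assumes the
// constructor parameters shown above are readable as public fields.
Set<LLMCapability> capabilitiesFor(
  OpenAICompatibleProviderConfig config,
  String model,
) {
  final modelConfig = config.modelConfigs[model];
  if (modelConfig == null) {
    // Unknown DeepSeek model: assume the basic capability set
    // (chat, streaming, tool calling).
    return config.defaultCapabilities;
  }
  // Known model: derive capabilities from its per-model flags.
  return {
    LLMCapability.chat,
    LLMCapability.streaming,
    if (modelConfig.supportsToolCalling) LLMCapability.toolCalling,
    if (modelConfig.supportsReasoning) LLMCapability.reasoning,
  };
}

Under these assumptions, passing 'deepseek-reasoner' would include LLMCapability.reasoning, while any unlisted model name would fall back to chat, streaming, and tool calling.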