openRouter constant

OpenRouter configuration using the OpenAI-compatible interface.

Implementation

static const OpenAICompatibleProviderConfig openRouter =
    OpenAICompatibleProviderConfig(
  providerId: 'openrouter',
  displayName: 'OpenRouter',
  description: 'OpenRouter unified API for multiple AI models',
  defaultBaseUrl: ProviderDefaults.openRouterBaseUrl,
  defaultModel: ProviderDefaults.openRouterDefaultModel,
  supportedCapabilities: {
    LLMCapability.chat,
    LLMCapability.streaming,
    LLMCapability.toolCalling,
    LLMCapability.vision,
    LLMCapability.liveSearch,
  },
  supportsReasoningEffort: false,
  supportsStructuredOutput: true,
  // OpenRouter supports web search through its plugin system
  parameterMappings: {
    'search_prompt': 'search_prompt',
    'use_online_shortcut': 'use_online_shortcut',
  },
  modelConfigs: {
    'openai/gpt-4': ModelCapabilityConfig(
      supportsReasoning: false,
      supportsVision: true,
      supportsToolCalling: true,
      maxContextLength: 8192,
    ),
    'anthropic/claude-3.5-sonnet': ModelCapabilityConfig(
      supportsReasoning: false,
      supportsVision: true,
      supportsToolCalling: true,
      maxContextLength: 200000,
    ),
  },
);
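
The sketch below shows one way downstream code could consult this configuration, for example to decide whether to attach image content before dispatching a request. It is a minimal sketch, not part of the library: the helper modelSupportsVision is hypothetical, and it assumes the constructor parameters above (modelConfigs, supportedCapabilities, supportsVision) are exposed as same-named fields on OpenAICompatibleProviderConfig and ModelCapabilityConfig, which is the usual Dart convention but is not confirmed by this page.

// Hypothetical helper, not part of the library. Import whichever package
// exposes OpenAICompatibleProviderConfig, ModelCapabilityConfig, and
// LLMCapability; the import path is omitted because it is package-specific.

bool modelSupportsVision(OpenAICompatibleProviderConfig config, String model) {
  // Per-model overrides in modelConfigs take precedence.
  final modelConfig = config.modelConfigs[model];
  if (modelConfig != null) {
    return modelConfig.supportsVision;
  }
  // Fall back to the provider-level capability set.
  return config.supportedCapabilities.contains(LLMCapability.vision);
}

// Usage (qualify openRouter with its enclosing class, which is not shown
// on this page):
//   modelSupportsVision(<EnclosingClass>.openRouter, 'openai/gpt-4');
//   // => true, from the modelConfigs entry above

Checking the per-model entry first reflects how the modelConfigs map refines the coarser provider-level capability set, so a model-specific override always wins over the defaults declared for OpenRouter as a whole.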