Coverage for mcpgateway / services / llm_provider_service.py: 96%
276 statements
« prev ^ index » next coverage.py v7.13.1, created at 2026-02-11 07:10 +0000
« prev ^ index » next coverage.py v7.13.1, created at 2026-02-11 07:10 +0000
1# -*- coding: utf-8 -*-
2"""Location: ./mcpgateway/services/llm_provider_service.py
3Copyright 2025
4SPDX-License-Identifier: Apache-2.0
6LLM Provider Service
8This module implements LLM provider management for the MCP Gateway.
9It handles provider registration, CRUD operations, model management,
10and health checks for the internal LLM Chat feature.
11"""
13# Standard
14from datetime import datetime, timezone
15from typing import List, Optional, Tuple
17# Third-Party
18import httpx
19from sqlalchemy import and_, func, select
20from sqlalchemy.exc import IntegrityError
21from sqlalchemy.orm import Session
23# First-Party
24from mcpgateway.db import LLMModel, LLMProvider, LLMProviderType
25from mcpgateway.llm_schemas import (
26 GatewayModelInfo,
27 HealthStatus,
28 LLMModelCreate,
29 LLMModelResponse,
30 LLMModelUpdate,
31 LLMProviderCreate,
32 LLMProviderResponse,
33 LLMProviderUpdate,
34 ProviderHealthCheck,
35)
36from mcpgateway.services.logging_service import LoggingService
37from mcpgateway.utils.create_slug import slugify
38from mcpgateway.utils.services_auth import decode_auth, encode_auth
# Module-level logger obtained from the gateway's central LoggingService so
# all provider-service log lines share the gateway-wide logging configuration.
logging_service = LoggingService()
logger = logging_service.get_logger(__name__)
class LLMProviderError(Exception):
    """Root exception type for every LLM provider service failure.

    Callers can catch this single type to handle any provider- or
    model-related error raised by :class:`LLMProviderService`.
    """
class LLMProviderNotFoundError(LLMProviderError):
    """Raised when a lookup by ID or slug matches no LLM provider record."""
class LLMProviderNameConflictError(LLMProviderError):
    """Raised when an LLM provider name conflicts with an existing one."""

    def __init__(self, name: str, provider_id: Optional[str] = None):
        """Build the conflict error.

        Args:
            name: The conflicting provider name.
            provider_id: Optional ID of the existing provider.
        """
        self.name = name
        self.provider_id = provider_id
        # Only mention the existing record's ID when the caller knows it.
        suffix = f" (ID: {provider_id})" if provider_id else ""
        super().__init__(f"LLM Provider already exists with name: {name}" + suffix)
class LLMModelNotFoundError(LLMProviderError):
    """Raised when a lookup by ID matches no LLM model record."""
class LLMModelConflictError(LLMProviderError):
    """Raised when creating a model that duplicates an existing one."""
79class LLMProviderService:
80 """Service for managing LLM providers and models.
82 Provides methods to create, list, retrieve, update, and delete
83 provider and model records. Also supports health checks.
84 """
86 def __init__(self) -> None:
87 """Initialize a new LLMProviderService instance."""
88 self._initialized = False
90 async def initialize(self) -> None:
91 """Initialize the LLM provider service."""
92 if not self._initialized: 92 ↛ exitline 92 didn't return from function 'initialize' because the condition on line 92 was always true
93 logger.info("Initializing LLM Provider Service")
94 self._initialized = True
96 async def shutdown(self) -> None:
97 """Shutdown the LLM provider service."""
98 if self._initialized: 98 ↛ exitline 98 didn't return from function 'shutdown' because the condition on line 98 was always true
99 logger.info("Shutting down LLM Provider Service")
100 self._initialized = False
102 # ---------------------------------------------------------------------------
103 # Provider CRUD Operations
104 # ---------------------------------------------------------------------------
106 def create_provider(
107 self,
108 db: Session,
109 provider_data: LLMProviderCreate,
110 created_by: Optional[str] = None,
111 ) -> LLMProvider:
112 """Create a new LLM provider.
114 Args:
115 db: Database session.
116 provider_data: Provider data to create.
117 created_by: Username of creator.
119 Returns:
120 Created LLMProvider instance.
122 Raises:
123 LLMProviderNameConflictError: If provider name already exists.
124 """
125 # Check for name conflict
126 existing = db.execute(select(LLMProvider).where(LLMProvider.name == provider_data.name)).scalar_one_or_none()
128 if existing:
129 raise LLMProviderNameConflictError(provider_data.name, existing.id)
131 # Encrypt API key if provided
132 encrypted_api_key = None
133 if provider_data.api_key:
134 encrypted_api_key = encode_auth({"api_key": provider_data.api_key})
136 # Create provider
137 provider = LLMProvider(
138 name=provider_data.name,
139 slug=slugify(provider_data.name),
140 description=provider_data.description,
141 provider_type=provider_data.provider_type.value,
142 api_key=encrypted_api_key,
143 api_base=provider_data.api_base,
144 api_version=provider_data.api_version,
145 config=provider_data.config,
146 default_model=provider_data.default_model,
147 default_temperature=provider_data.default_temperature,
148 default_max_tokens=provider_data.default_max_tokens,
149 enabled=provider_data.enabled,
150 plugin_ids=provider_data.plugin_ids,
151 created_by=created_by,
152 )
154 try:
155 db.add(provider)
156 db.commit()
157 db.refresh(provider)
158 logger.info(f"Created LLM provider: {provider.name} (ID: {provider.id})")
159 return provider
160 except IntegrityError as e:
161 db.rollback()
162 logger.error(f"Failed to create LLM provider: {e}")
163 raise LLMProviderNameConflictError(provider_data.name)
165 def get_provider(self, db: Session, provider_id: str) -> LLMProvider:
166 """Get an LLM provider by ID.
168 Args:
169 db: Database session.
170 provider_id: Provider ID to retrieve.
172 Returns:
173 LLMProvider instance.
175 Raises:
176 LLMProviderNotFoundError: If provider not found.
177 """
178 provider = db.execute(select(LLMProvider).where(LLMProvider.id == provider_id)).scalar_one_or_none()
180 if not provider:
181 raise LLMProviderNotFoundError(f"Provider not found: {provider_id}")
183 return provider
185 def get_provider_by_slug(self, db: Session, slug: str) -> LLMProvider:
186 """Get an LLM provider by slug.
188 Args:
189 db: Database session.
190 slug: Provider slug to retrieve.
192 Returns:
193 LLMProvider instance.
195 Raises:
196 LLMProviderNotFoundError: If provider not found.
197 """
198 provider = db.execute(select(LLMProvider).where(LLMProvider.slug == slug)).scalar_one_or_none()
200 if not provider:
201 raise LLMProviderNotFoundError(f"Provider not found: {slug}")
203 return provider
205 def list_providers(
206 self,
207 db: Session,
208 enabled_only: bool = False,
209 page: int = 1,
210 page_size: int = 50,
211 ) -> Tuple[List[LLMProvider], int]:
212 """List all LLM providers.
214 Args:
215 db: Database session.
216 enabled_only: Only return enabled providers.
217 page: Page number (1-indexed).
218 page_size: Items per page.
220 Returns:
221 Tuple of (providers list, total count).
222 """
223 query = select(LLMProvider)
225 if enabled_only:
226 query = query.where(LLMProvider.enabled.is_(True))
228 # Get total count efficiently using func.count()
229 count_query = select(func.count(LLMProvider.id)) # pylint: disable=not-callable
230 if enabled_only:
231 count_query = count_query.where(LLMProvider.enabled.is_(True))
232 total = db.execute(count_query).scalar() or 0
234 # Apply pagination
235 offset = (page - 1) * page_size
236 query = query.offset(offset).limit(page_size).order_by(LLMProvider.name)
238 providers = list(db.execute(query).scalars().all())
239 return providers, total
241 def update_provider(
242 self,
243 db: Session,
244 provider_id: str,
245 provider_data: LLMProviderUpdate,
246 modified_by: Optional[str] = None,
247 ) -> LLMProvider:
248 """Update an LLM provider.
250 Args:
251 db: Database session.
252 provider_id: Provider ID to update.
253 provider_data: Updated provider data.
254 modified_by: Username of modifier.
256 Returns:
257 Updated LLMProvider instance.
259 Raises:
260 LLMProviderNotFoundError: If provider not found.
261 LLMProviderNameConflictError: If new name conflicts.
262 IntegrityError: If database constraint violation.
263 """
264 provider = self.get_provider(db, provider_id)
266 # Check for name conflict if name is being changed
267 if provider_data.name and provider_data.name != provider.name:
268 existing = db.execute(
269 select(LLMProvider).where(
270 and_(
271 LLMProvider.name == provider_data.name,
272 LLMProvider.id != provider_id,
273 )
274 )
275 ).scalar_one_or_none()
277 if existing:
278 raise LLMProviderNameConflictError(provider_data.name, existing.id)
280 provider.name = provider_data.name
281 provider.slug = slugify(provider_data.name)
283 # Update fields if provided
284 if provider_data.description is not None: 284 ↛ 286line 284 didn't jump to line 286 because the condition on line 284 was always true
285 provider.description = provider_data.description
286 if provider_data.provider_type is not None:
287 provider.provider_type = provider_data.provider_type.value
288 if provider_data.api_key is not None:
289 provider.api_key = encode_auth({"api_key": provider_data.api_key})
290 if provider_data.api_base is not None:
291 provider.api_base = provider_data.api_base
292 if provider_data.api_version is not None:
293 provider.api_version = provider_data.api_version
294 if provider_data.config is not None:
295 provider.config = provider_data.config
296 if provider_data.default_model is not None:
297 provider.default_model = provider_data.default_model
298 if provider_data.default_temperature is not None:
299 provider.default_temperature = provider_data.default_temperature
300 if provider_data.default_max_tokens is not None:
301 provider.default_max_tokens = provider_data.default_max_tokens
302 if provider_data.enabled is not None:
303 provider.enabled = provider_data.enabled
304 if provider_data.plugin_ids is not None:
305 provider.plugin_ids = provider_data.plugin_ids
307 provider.modified_by = modified_by
309 try:
310 db.commit()
311 db.refresh(provider)
312 logger.info(f"Updated LLM provider: {provider.name} (ID: {provider.id})")
313 return provider
314 except IntegrityError as e:
315 db.rollback()
316 logger.error(f"Failed to update LLM provider: {e}")
317 raise
319 def delete_provider(self, db: Session, provider_id: str) -> bool:
320 """Delete an LLM provider.
322 Args:
323 db: Database session.
324 provider_id: Provider ID to delete.
326 Returns:
327 True if deleted successfully.
329 Raises:
330 LLMProviderNotFoundError: If provider not found.
331 """
332 provider = self.get_provider(db, provider_id)
333 provider_name = provider.name
335 db.delete(provider)
336 db.commit()
337 logger.info(f"Deleted LLM provider: {provider_name} (ID: {provider_id})")
338 return True
340 def set_provider_state(self, db: Session, provider_id: str, activate: Optional[bool] = None) -> LLMProvider:
341 """Set provider enabled state.
343 Args:
344 db: Database session.
345 provider_id: Provider ID to update.
346 activate: If provided, sets enabled to this value. If None, inverts current state (legacy behavior).
348 Returns:
349 Updated LLMProvider instance.
350 """
351 provider = self.get_provider(db, provider_id)
352 if activate is None:
353 # Legacy toggle behavior for backward compatibility
354 provider.enabled = not provider.enabled
355 else:
356 provider.enabled = activate
357 db.commit()
358 db.refresh(provider)
359 logger.info(f"Set LLM provider state: {provider.name} enabled={provider.enabled}")
360 return provider
362 # ---------------------------------------------------------------------------
363 # Model CRUD Operations
364 # ---------------------------------------------------------------------------
366 def create_model(
367 self,
368 db: Session,
369 model_data: LLMModelCreate,
370 ) -> LLMModel:
371 """Create a new LLM model.
373 Args:
374 db: Database session.
375 model_data: Model data to create.
377 Returns:
378 Created LLMModel instance.
380 Raises:
381 LLMProviderNotFoundError: If provider not found.
382 LLMModelConflictError: If model already exists for provider.
383 """
384 # Verify provider exists
385 self.get_provider(db, model_data.provider_id)
387 # Check for conflict
388 existing = db.execute(
389 select(LLMModel).where(
390 and_(
391 LLMModel.provider_id == model_data.provider_id,
392 LLMModel.model_id == model_data.model_id,
393 )
394 )
395 ).scalar_one_or_none()
397 if existing:
398 raise LLMModelConflictError(f"Model {model_data.model_id} already exists for provider {model_data.provider_id}")
400 model = LLMModel(
401 provider_id=model_data.provider_id,
402 model_id=model_data.model_id,
403 model_name=model_data.model_name,
404 model_alias=model_data.model_alias,
405 description=model_data.description,
406 supports_chat=model_data.supports_chat,
407 supports_streaming=model_data.supports_streaming,
408 supports_function_calling=model_data.supports_function_calling,
409 supports_vision=model_data.supports_vision,
410 context_window=model_data.context_window,
411 max_output_tokens=model_data.max_output_tokens,
412 enabled=model_data.enabled,
413 deprecated=model_data.deprecated,
414 )
416 try:
417 db.add(model)
418 db.commit()
419 db.refresh(model)
420 logger.info(f"Created LLM model: {model.model_id} (ID: {model.id})")
421 return model
422 except IntegrityError as e:
423 db.rollback()
424 logger.error(f"Failed to create LLM model: {e}")
425 raise LLMModelConflictError(f"Model conflict: {model_data.model_id}")
427 def get_model(self, db: Session, model_id: str) -> LLMModel:
428 """Get an LLM model by ID.
430 Args:
431 db: Database session.
432 model_id: Model ID to retrieve.
434 Returns:
435 LLMModel instance.
437 Raises:
438 LLMModelNotFoundError: If model not found.
439 """
440 model = db.execute(select(LLMModel).where(LLMModel.id == model_id)).scalar_one_or_none()
442 if not model:
443 raise LLMModelNotFoundError(f"Model not found: {model_id}")
445 return model
447 def list_models(
448 self,
449 db: Session,
450 provider_id: Optional[str] = None,
451 enabled_only: bool = False,
452 page: int = 1,
453 page_size: int = 50,
454 ) -> Tuple[List[LLMModel], int]:
455 """List LLM models.
457 Args:
458 db: Database session.
459 provider_id: Filter by provider ID.
460 enabled_only: Only return enabled models.
461 page: Page number (1-indexed).
462 page_size: Items per page.
464 Returns:
465 Tuple of (models list, total count).
466 """
467 query = select(LLMModel)
469 if provider_id:
470 query = query.where(LLMModel.provider_id == provider_id)
471 if enabled_only:
472 query = query.where(LLMModel.enabled.is_(True))
474 # Get total count efficiently using func.count()
475 count_query = select(func.count(LLMModel.id)) # pylint: disable=not-callable
476 if provider_id:
477 count_query = count_query.where(LLMModel.provider_id == provider_id)
478 if enabled_only:
479 count_query = count_query.where(LLMModel.enabled.is_(True))
480 total = db.execute(count_query).scalar() or 0
482 # Apply pagination
483 offset = (page - 1) * page_size
484 query = query.offset(offset).limit(page_size).order_by(LLMModel.model_name)
486 models = list(db.execute(query).scalars().all())
487 return models, total
489 def update_model(
490 self,
491 db: Session,
492 model_id: str,
493 model_data: LLMModelUpdate,
494 ) -> LLMModel:
495 """Update an LLM model.
497 Args:
498 db: Database session.
499 model_id: Model ID to update.
500 model_data: Updated model data.
502 Returns:
503 Updated LLMModel instance.
504 """
505 model = self.get_model(db, model_id)
507 if model_data.model_id is not None: 507 ↛ 509line 507 didn't jump to line 509 because the condition on line 507 was always true
508 model.model_id = model_data.model_id
509 if model_data.model_name is not None: 509 ↛ 511line 509 didn't jump to line 511 because the condition on line 509 was always true
510 model.model_name = model_data.model_name
511 if model_data.model_alias is not None: 511 ↛ 513line 511 didn't jump to line 513 because the condition on line 511 was always true
512 model.model_alias = model_data.model_alias
513 if model_data.description is not None: 513 ↛ 515line 513 didn't jump to line 515 because the condition on line 513 was always true
514 model.description = model_data.description
515 if model_data.supports_chat is not None: 515 ↛ 517line 515 didn't jump to line 517 because the condition on line 515 was always true
516 model.supports_chat = model_data.supports_chat
517 if model_data.supports_streaming is not None: 517 ↛ 519line 517 didn't jump to line 519 because the condition on line 517 was always true
518 model.supports_streaming = model_data.supports_streaming
519 if model_data.supports_function_calling is not None: 519 ↛ 521line 519 didn't jump to line 521 because the condition on line 519 was always true
520 model.supports_function_calling = model_data.supports_function_calling
521 if model_data.supports_vision is not None: 521 ↛ 523line 521 didn't jump to line 523 because the condition on line 521 was always true
522 model.supports_vision = model_data.supports_vision
523 if model_data.context_window is not None: 523 ↛ 525line 523 didn't jump to line 525 because the condition on line 523 was always true
524 model.context_window = model_data.context_window
525 if model_data.max_output_tokens is not None: 525 ↛ 527line 525 didn't jump to line 527 because the condition on line 525 was always true
526 model.max_output_tokens = model_data.max_output_tokens
527 if model_data.enabled is not None: 527 ↛ 529line 527 didn't jump to line 529 because the condition on line 527 was always true
528 model.enabled = model_data.enabled
529 if model_data.deprecated is not None: 529 ↛ 532line 529 didn't jump to line 532 because the condition on line 529 was always true
530 model.deprecated = model_data.deprecated
532 db.commit()
533 db.refresh(model)
534 logger.info(f"Updated LLM model: {model.model_id} (ID: {model.id})")
535 return model
537 def delete_model(self, db: Session, model_id: str) -> bool:
538 """Delete an LLM model.
540 Args:
541 db: Database session.
542 model_id: Model ID to delete.
544 Returns:
545 True if deleted successfully.
546 """
547 model = self.get_model(db, model_id)
548 model_name = model.model_id
550 db.delete(model)
551 db.commit()
552 logger.info(f"Deleted LLM model: {model_name} (ID: {model_id})")
553 return True
555 def set_model_state(self, db: Session, model_id: str, activate: Optional[bool] = None) -> LLMModel:
556 """Set model enabled state.
558 Args:
559 db: Database session.
560 model_id: Model ID to update.
561 activate: If provided, sets enabled to this value. If None, inverts current state (legacy behavior).
563 Returns:
564 Updated LLMModel instance.
565 """
566 model = self.get_model(db, model_id)
567 if activate is None:
568 # Legacy toggle behavior for backward compatibility
569 model.enabled = not model.enabled
570 else:
571 model.enabled = activate
572 db.commit()
573 db.refresh(model)
574 logger.info(f"Set LLM model state: {model.model_id} enabled={model.enabled}")
575 return model
577 # ---------------------------------------------------------------------------
578 # Gateway Models (for LLM Chat dropdown)
579 # ---------------------------------------------------------------------------
581 def get_gateway_models(self, db: Session) -> List[GatewayModelInfo]:
582 """Get enabled models for the LLM Chat dropdown.
584 Args:
585 db: Database session.
587 Returns:
588 List of GatewayModelInfo for enabled models.
589 """
590 # Get enabled models from enabled providers
591 query = (
592 select(LLMModel, LLMProvider)
593 .join(LLMProvider, LLMModel.provider_id == LLMProvider.id)
594 .where(
595 and_(
596 LLMModel.enabled.is_(True),
597 LLMProvider.enabled.is_(True),
598 LLMModel.supports_chat.is_(True),
599 )
600 )
601 .order_by(LLMProvider.name, LLMModel.model_name)
602 )
604 results = db.execute(query).all()
606 models = []
607 for model, provider in results:
608 models.append(
609 GatewayModelInfo(
610 id=model.id,
611 model_id=model.model_id,
612 model_name=model.model_name,
613 provider_id=provider.id,
614 provider_name=provider.name,
615 provider_type=provider.provider_type,
616 supports_streaming=model.supports_streaming,
617 supports_function_calling=model.supports_function_calling,
618 supports_vision=model.supports_vision,
619 )
620 )
622 return models
624 # ---------------------------------------------------------------------------
625 # Health Check Operations
626 # ---------------------------------------------------------------------------
    async def check_provider_health(
        self,
        db: Session,
        provider_id: str,
    ) -> ProviderHealthCheck:
        """Check health of an LLM provider.

        Probes a type-appropriate endpoint, records the outcome (status and
        timestamp) on the provider row, and returns the check result. Any
        probe failure is captured in the result rather than raised.

        Args:
            db: Database session.
            provider_id: Provider ID to check.

        Returns:
            ProviderHealthCheck result.

        Raises:
            LLMProviderNotFoundError: If the provider does not exist.
        """
        provider = self.get_provider(db, provider_id)

        start_time = datetime.now(timezone.utc)
        status = HealthStatus.UNKNOWN
        error_msg = None
        response_time_ms = None

        try:
            # Get API key (stored encrypted; decoded only for this request)
            api_key = None
            if provider.api_key:
                auth_data = decode_auth(provider.api_key)
                api_key = auth_data.get("api_key")

            # Perform health check based on provider type using shared HTTP client.
            # Imported lazily to avoid a circular import at module load time.
            # First-Party
            from mcpgateway.services.http_client_service import get_http_client  # pylint: disable=import-outside-toplevel

            client = await get_http_client()
            if provider.provider_type == LLMProviderType.OPENAI:
                # Check OpenAI models endpoint; only HTTP 200 counts as healthy.
                headers = {"Authorization": f"Bearer {api_key}"}
                base_url = provider.api_base or "https://api.openai.com/v1"
                response = await client.get(f"{base_url}/models", headers=headers, timeout=10.0)
                if response.status_code == 200:
                    status = HealthStatus.HEALTHY
                else:
                    status = HealthStatus.UNHEALTHY
                    error_msg = f"HTTP {response.status_code}"

            elif provider.provider_type == LLMProviderType.OLLAMA:
                # Check Ollama health endpoint
                base_url = provider.api_base or "http://localhost:11434"
                # Handle OpenAI-compatible endpoint (/v1)
                if base_url.rstrip("/").endswith("/v1"):
                    # Use OpenAI-compatible models endpoint
                    response = await client.get(f"{base_url.rstrip('/')}/models", timeout=10.0)
                else:
                    # Use native Ollama API
                    response = await client.get(f"{base_url.rstrip('/')}/api/tags", timeout=10.0)
                if response.status_code == 200:
                    status = HealthStatus.HEALTHY
                else:
                    status = HealthStatus.UNHEALTHY
                    error_msg = f"HTTP {response.status_code}"

            else:
                # Generic check - just verify connectivity; any non-5xx counts
                # as healthy since unknown providers may not expose /models.
                if provider.api_base:
                    response = await client.get(provider.api_base, timeout=5.0)
                    status = HealthStatus.HEALTHY if response.status_code < 500 else HealthStatus.UNHEALTHY
                else:
                    status = HealthStatus.UNKNOWN
                    error_msg = "No API base URL configured"

        except httpx.TimeoutException:
            status = HealthStatus.UNHEALTHY
            error_msg = "Connection timeout"
        except httpx.RequestError as e:
            status = HealthStatus.UNHEALTHY
            error_msg = f"Connection error: {str(e)}"
        except Exception as e:  # broad catch: a health probe must never raise
            status = HealthStatus.UNHEALTHY
            error_msg = f"Error: {str(e)}"

        end_time = datetime.now(timezone.utc)
        response_time_ms = (end_time - start_time).total_seconds() * 1000

        # Persist the outcome on the provider row for later inspection
        provider.health_status = status.value
        provider.last_health_check = end_time
        db.commit()

        return ProviderHealthCheck(
            provider_id=provider.id,
            provider_name=provider.name,
            provider_type=provider.provider_type,
            status=status,
            response_time_ms=response_time_ms,
            error=error_msg,
            checked_at=end_time,
        )
725 def to_provider_response(
726 self,
727 provider: LLMProvider,
728 model_count: int = 0,
729 ) -> LLMProviderResponse:
730 """Convert LLMProvider to LLMProviderResponse.
732 Args:
733 provider: LLMProvider instance.
734 model_count: Number of models for this provider.
736 Returns:
737 LLMProviderResponse instance.
738 """
739 return LLMProviderResponse(
740 id=provider.id,
741 name=provider.name,
742 slug=provider.slug,
743 description=provider.description,
744 provider_type=provider.provider_type,
745 api_base=provider.api_base,
746 api_version=provider.api_version,
747 config=provider.config,
748 default_model=provider.default_model,
749 default_temperature=provider.default_temperature,
750 default_max_tokens=provider.default_max_tokens,
751 enabled=provider.enabled,
752 health_status=provider.health_status,
753 last_health_check=provider.last_health_check,
754 plugin_ids=provider.plugin_ids,
755 created_at=provider.created_at,
756 updated_at=provider.updated_at,
757 created_by=provider.created_by,
758 modified_by=provider.modified_by,
759 model_count=model_count,
760 )
762 def to_model_response(
763 self,
764 model: LLMModel,
765 provider: Optional[LLMProvider] = None,
766 ) -> LLMModelResponse:
767 """Convert LLMModel to LLMModelResponse.
769 Args:
770 model: LLMModel instance.
771 provider: Optional provider for name/type info.
773 Returns:
774 LLMModelResponse instance.
775 """
776 return LLMModelResponse(
777 id=model.id,
778 provider_id=model.provider_id,
779 model_id=model.model_id,
780 model_name=model.model_name,
781 model_alias=model.model_alias,
782 description=model.description,
783 supports_chat=model.supports_chat,
784 supports_streaming=model.supports_streaming,
785 supports_function_calling=model.supports_function_calling,
786 supports_vision=model.supports_vision,
787 context_window=model.context_window,
788 max_output_tokens=model.max_output_tokens,
789 enabled=model.enabled,
790 deprecated=model.deprecated,
791 created_at=model.created_at,
792 updated_at=model.updated_at,
793 provider_name=provider.name if provider else None,
794 provider_type=provider.provider_type if provider else None,
795 )