Coverage for mcpgateway/routers/llm_config_router.py: 100%
188 statements
« prev ^ index » next — coverage.py v7.13.4, created at 2026-03-09 03:05 +0000
1# -*- coding: utf-8 -*-
2"""Location: ./mcpgateway/routers/llm_config_router.py
3Copyright 2025
4SPDX-License-Identifier: Apache-2.0
6LLM Configuration Router.
7This module provides FastAPI routes for LLM provider and model management.
8"""
10# Standard
11from typing import Optional
13# Third-Party
14from fastapi import APIRouter, Depends, HTTPException, Query, status
15from sqlalchemy.orm import Session
17# First-Party
18from mcpgateway.auth import get_current_user
19from mcpgateway.db import get_db
20from mcpgateway.llm_schemas import (
21 GatewayModelsResponse,
22 LLMModelCreate,
23 LLMModelListResponse,
24 LLMModelResponse,
25 LLMModelUpdate,
26 LLMProviderCreate,
27 LLMProviderListResponse,
28 LLMProviderResponse,
29 LLMProviderUpdate,
30 ProviderHealthCheck,
31)
32from mcpgateway.middleware.rbac import get_current_user_with_permissions, require_permission
33from mcpgateway.services.llm_provider_service import (
34 LLMModelConflictError,
35 LLMModelNotFoundError,
36 LLMProviderNameConflictError,
37 LLMProviderNotFoundError,
38 LLMProviderService,
39 LLMProviderValidationError,
40)
41from mcpgateway.services.logging_service import LoggingService
# Initialize logging for this module.
logging_service = LoggingService()
logger = logging_service.get_logger(__name__)

# Create the router; mounted by the application under the LLM config prefix.
llm_config_router = APIRouter()

# Module-level service singleton shared by all endpoints below.
llm_provider_service = LLMProviderService()
54# ---------------------------------------------------------------------------
55# Provider CRUD Endpoints
56# ---------------------------------------------------------------------------
@llm_config_router.post(
    "/providers",
    response_model=LLMProviderResponse,
    status_code=status.HTTP_201_CREATED,
    summary="Create LLM Provider",
    description="Create a new LLM provider configuration.",
)
@require_permission("admin.system_config")
async def create_provider(
    provider_data: LLMProviderCreate,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMProviderResponse:
    """Create a new LLM provider.

    Args:
        provider_data: Provider configuration data.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Created provider response.

    Raises:
        HTTPException: 409 if the provider name conflicts, 422 if validation
            fails, 500 for any other creation failure.
    """
    try:
        provider = llm_provider_service.create_provider(
            db=db,
            provider_data=provider_data,
            created_by=current_user_ctx.get("email"),
        )
        # Build the response before commit/close so lazy-loaded relationships
        # (provider.models) are read while the session is still live.
        model_count = len(provider.models)
        result = llm_provider_service.to_provider_response(provider, model_count)
        db.commit()
        db.close()
        return result
    except LLMProviderNameConflictError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e
    except LLMProviderValidationError as e:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_CONTENT, detail=str(e)) from e
    except Exception as e:
        # Lazy %-formatting avoids building the message when the level is off.
        logger.error("Failed to create LLM provider: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create provider: {str(e)}",
        ) from e
@llm_config_router.get(
    "/providers",
    response_model=LLMProviderListResponse,
    summary="List LLM Providers",
    description="List all configured LLM providers.",
)
@require_permission("admin.system_config")
async def list_providers(
    enabled_only: bool = Query(False, description="Only return enabled providers"),
    page: int = Query(1, ge=1, description="Page number"),
    page_size: int = Query(50, ge=1, le=100, description="Items per page"),
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMProviderListResponse:
    """Return a paginated listing of configured LLM providers.

    Args:
        enabled_only: When True, restrict results to enabled providers.
        page: 1-based page number.
        page_size: Number of items per page (1-100).
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Paginated list of provider responses.
    """
    records, total = llm_provider_service.list_providers(
        db=db,
        enabled_only=enabled_only,
        page=page,
        page_size=page_size,
    )

    # Convert ORM rows to response schemas while the session is still open.
    entries = []
    for record in records:
        entries.append(llm_provider_service.to_provider_response(record, len(record.models)))

    response = LLMProviderListResponse(
        providers=entries,
        total=total,
        page=page,
        page_size=page_size,
    )
    db.commit()
    db.close()
    return response
@llm_config_router.get(
    "/providers/{provider_id}",
    response_model=LLMProviderResponse,
    summary="Get LLM Provider",
    description="Get a specific LLM provider by ID.",
)
@require_permission("admin.system_config")
async def get_provider(
    provider_id: str,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMProviderResponse:
    """Get an LLM provider by ID.

    Args:
        provider_id: Provider ID.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Provider response.

    Raises:
        HTTPException: 404 if the provider is not found.
    """
    try:
        provider = llm_provider_service.get_provider(db, provider_id)
        # Read lazy-loaded models before closing the session.
        model_count = len(provider.models)
        result = llm_provider_service.to_provider_response(provider, model_count)
        db.commit()
        db.close()
        return result
    except LLMProviderNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@llm_config_router.patch(
    "/providers/{provider_id}",
    response_model=LLMProviderResponse,
    summary="Update LLM Provider",
    description="Update an existing LLM provider.",
)
@require_permission("admin.system_config")
async def update_provider(
    provider_id: str,
    provider_data: LLMProviderUpdate,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMProviderResponse:
    """Update an LLM provider.

    Args:
        provider_id: Provider ID.
        provider_data: Updated provider data.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Updated provider response.

    Raises:
        HTTPException: 404 if the provider is not found, 409 if the new name
            conflicts, 422 if validation fails.
    """
    try:
        provider = llm_provider_service.update_provider(
            db=db,
            provider_id=provider_id,
            provider_data=provider_data,
            modified_by=current_user_ctx.get("email"),
        )
        # Read lazy-loaded models before closing the session.
        model_count = len(provider.models)
        result = llm_provider_service.to_provider_response(provider, model_count)
        db.commit()
        db.close()
        return result
    except LLMProviderNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
    except LLMProviderNameConflictError as e:
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e
    except LLMProviderValidationError as e:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_CONTENT, detail=str(e)) from e
@llm_config_router.delete(
    "/providers/{provider_id}",
    status_code=status.HTTP_204_NO_CONTENT,
    summary="Delete LLM Provider",
    description="Delete an LLM provider and all its models.",
)
@require_permission("admin.system_config")
async def delete_provider(
    provider_id: str,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> None:
    """Delete an LLM provider.

    Args:
        provider_id: Provider ID.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Raises:
        HTTPException: 404 if the provider is not found.
    """
    try:
        llm_provider_service.delete_provider(db, provider_id)
        db.commit()
        db.close()
    except LLMProviderNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@llm_config_router.post(
    "/providers/{provider_id}/state",
    response_model=LLMProviderResponse,
    summary="Set LLM Provider State",
    description="Set the enabled status of an LLM provider.",
)
@require_permission("admin.system_config")
async def set_provider_state(
    provider_id: str,
    activate: Optional[bool] = Query(None, description="Set enabled state. If not provided, inverts current state."),
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMProviderResponse:
    """Set provider enabled state.

    Args:
        provider_id: Provider ID.
        activate: If provided, sets enabled to this value. If None, inverts current state.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Updated provider response.

    Raises:
        HTTPException: 404 if the provider is not found.
    """
    try:
        provider = llm_provider_service.set_provider_state(db, provider_id, activate)
        # Read lazy-loaded models before closing the session.
        model_count = len(provider.models)
        result = llm_provider_service.to_provider_response(provider, model_count)
        db.commit()
        db.close()
        return result
    except LLMProviderNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@llm_config_router.post(
    "/providers/{provider_id}/health",
    response_model=ProviderHealthCheck,
    summary="Check Provider Health",
    description="Perform a health check on an LLM provider.",
)
@require_permission("admin.system_config")
async def check_provider_health(
    provider_id: str,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> ProviderHealthCheck:
    """Check health of an LLM provider.

    Args:
        provider_id: Provider ID.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Health check result.

    Raises:
        HTTPException: 404 if the provider is not found.
    """
    try:
        result = await llm_provider_service.check_provider_health(db, provider_id)
        db.commit()
        db.close()
        return result
    except LLMProviderNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
339# ---------------------------------------------------------------------------
340# Model CRUD Endpoints
341# ---------------------------------------------------------------------------
@llm_config_router.post(
    "/models",
    response_model=LLMModelResponse,
    status_code=status.HTTP_201_CREATED,
    summary="Create LLM Model",
    description="Create a new LLM model for a provider.",
)
@require_permission("admin.system_config")
async def create_model(
    model_data: LLMModelCreate,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMModelResponse:
    """Create a new LLM model.

    Args:
        model_data: Model configuration data.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Created model response.

    Raises:
        HTTPException: 404 if the provider is not found, 409 if the model
            conflicts with an existing one.
    """
    try:
        model = llm_provider_service.create_model(db, model_data)
        # Fetch the parent provider so the response can include provider info.
        provider = llm_provider_service.get_provider(db, model.provider_id)
        result = llm_provider_service.to_model_response(model, provider)
        db.commit()
        db.close()
        return result
    except LLMProviderNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
    except LLMModelConflictError as e:
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e
@llm_config_router.get(
    "/models",
    response_model=LLMModelListResponse,
    summary="List LLM Models",
    description="List all configured LLM models.",
)
@require_permission("admin.system_config")
async def list_models(
    provider_id: Optional[str] = Query(None, description="Filter by provider ID"),
    enabled_only: bool = Query(False, description="Only return enabled models"),
    page: int = Query(1, ge=1, description="Page number"),
    page_size: int = Query(50, ge=1, le=100, description="Items per page"),
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMModelListResponse:
    """Return a paginated listing of configured LLM models.

    Args:
        provider_id: Optional provider ID to filter by.
        enabled_only: When True, restrict results to enabled models.
        page: 1-based page number.
        page_size: Number of items per page (1-100).
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Paginated list of model responses.
    """
    records, total = llm_provider_service.list_models(
        db=db,
        provider_id=provider_id,
        enabled_only=enabled_only,
        page=page,
        page_size=page_size,
    )

    entries = []
    for record in records:
        # Attach provider details when available; fall back to a bare model
        # response if the provider row can no longer be resolved.
        try:
            parent = llm_provider_service.get_provider(db, record.provider_id)
            entries.append(llm_provider_service.to_model_response(record, parent))
        except LLMProviderNotFoundError:
            entries.append(llm_provider_service.to_model_response(record))

    response = LLMModelListResponse(
        models=entries,
        total=total,
        page=page,
        page_size=page_size,
    )
    db.commit()
    db.close()
    return response
@llm_config_router.get(
    "/models/{model_id}",
    response_model=LLMModelResponse,
    summary="Get LLM Model",
    description="Get a specific LLM model by ID.",
)
@require_permission("admin.system_config")
async def get_model(
    model_id: str,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMModelResponse:
    """Get an LLM model by ID.

    Args:
        model_id: Model ID.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Model response.

    Raises:
        HTTPException: 404 if the model is not found.
    """
    try:
        model = llm_provider_service.get_model(db, model_id)
        # Provider lookup is best-effort: a missing provider degrades the
        # response rather than failing the request.
        try:
            provider = llm_provider_service.get_provider(db, model.provider_id)
        except LLMProviderNotFoundError:
            provider = None
        result = llm_provider_service.to_model_response(model, provider)
        db.commit()
        db.close()
        return result
    except LLMModelNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@llm_config_router.patch(
    "/models/{model_id}",
    response_model=LLMModelResponse,
    summary="Update LLM Model",
    description="Update an existing LLM model.",
)
@require_permission("admin.system_config")
async def update_model(
    model_id: str,
    model_data: LLMModelUpdate,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMModelResponse:
    """Update an LLM model.

    Args:
        model_id: Model ID.
        model_data: Updated model data.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Updated model response.

    Raises:
        HTTPException: 404 if the model is not found.
    """
    try:
        model = llm_provider_service.update_model(db, model_id, model_data)
        # Provider lookup is best-effort: a missing provider degrades the
        # response rather than failing the request.
        try:
            provider = llm_provider_service.get_provider(db, model.provider_id)
        except LLMProviderNotFoundError:
            provider = None
        result = llm_provider_service.to_model_response(model, provider)
        db.commit()
        db.close()
        return result
    except LLMModelNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@llm_config_router.delete(
    "/models/{model_id}",
    status_code=status.HTTP_204_NO_CONTENT,
    summary="Delete LLM Model",
    description="Delete an LLM model.",
)
@require_permission("admin.system_config")
async def delete_model(
    model_id: str,
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> None:
    """Delete an LLM model.

    Args:
        model_id: Model ID.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Raises:
        HTTPException: 404 if the model is not found.
    """
    try:
        llm_provider_service.delete_model(db, model_id)
        db.commit()
        db.close()
    except LLMModelNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@llm_config_router.post(
    "/models/{model_id}/state",
    response_model=LLMModelResponse,
    summary="Set LLM Model State",
    description="Set the enabled status of an LLM model.",
)
@require_permission("admin.system_config")
async def set_model_state(
    model_id: str,
    activate: Optional[bool] = Query(None, description="Set enabled state. If not provided, inverts current state."),
    current_user_ctx: dict = Depends(get_current_user_with_permissions),
    db: Session = Depends(get_db),
) -> LLMModelResponse:
    """Set model enabled state.

    Args:
        model_id: Model ID.
        activate: If provided, sets enabled to this value. If None, inverts current state.
        current_user_ctx: Authenticated user context.
        db: Database session.

    Returns:
        Updated model response.

    Raises:
        HTTPException: 404 if the model is not found.
    """
    try:
        model = llm_provider_service.set_model_state(db, model_id, activate)
        # Provider lookup is best-effort: a missing provider degrades the
        # response rather than failing the request.
        try:
            provider = llm_provider_service.get_provider(db, model.provider_id)
        except LLMProviderNotFoundError:
            provider = None
        result = llm_provider_service.to_model_response(model, provider)
        db.commit()
        db.close()
        return result
    except LLMModelNotFoundError as e:
        # Chain the cause (PEP 3134) so the original traceback is preserved.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
589# ---------------------------------------------------------------------------
590# Gateway Models Endpoint (for LLM Chat dropdown)
591# ---------------------------------------------------------------------------
@llm_config_router.get(
    "/gateway/models",
    response_model=GatewayModelsResponse,
    summary="Get Gateway Models",
    description="Get enabled models for the LLM Chat dropdown.",
)
async def get_gateway_models(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user),
) -> GatewayModelsResponse:
    """Return the models available to the LLM Chat dropdown.

    Used by the LLM Chat UI to populate its model selector; only enabled
    chat-capable models from enabled providers are included.

    Args:
        db: Database session.
        current_user: Authenticated user.

    Returns:
        List of available gateway models with a total count.
    """
    available = llm_provider_service.get_gateway_models(db)
    response = GatewayModelsResponse(models=available, count=len(available))
    db.commit()
    db.close()
    return response